You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by gi...@apache.org on 2018/12/12 14:52:17 UTC

[01/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site b31c1d9b6 -> d702fb719


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.MobRegionDirChecker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.MobRegionDirChecker.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.MobRegionDirChecker.html
index b13c1bd..dfd5ad8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.MobRegionDirChecker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.MobRegionDirChecker.html
@@ -90,7 +90,7 @@
 <span class="sourceLineNo">082</span>      boolean quarantine) throws IOException {<a name="line.82"></a>
 <span class="sourceLineNo">083</span>    this.conf = conf;<a name="line.83"></a>
 <span class="sourceLineNo">084</span>    this.fs = FileSystem.get(conf);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    this.cacheConf = new CacheConfig(conf);<a name="line.85"></a>
+<span class="sourceLineNo">085</span>    this.cacheConf = CacheConfig.DISABLED;<a name="line.85"></a>
 <span class="sourceLineNo">086</span>    this.executor = executor;<a name="line.86"></a>
 <span class="sourceLineNo">087</span>    this.inQuarantineMode = quarantine;<a name="line.87"></a>
 <span class="sourceLineNo">088</span>  }<a name="line.88"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.RegionDirChecker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.RegionDirChecker.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.RegionDirChecker.html
index b13c1bd..dfd5ad8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.RegionDirChecker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.RegionDirChecker.html
@@ -90,7 +90,7 @@
 <span class="sourceLineNo">082</span>      boolean quarantine) throws IOException {<a name="line.82"></a>
 <span class="sourceLineNo">083</span>    this.conf = conf;<a name="line.83"></a>
 <span class="sourceLineNo">084</span>    this.fs = FileSystem.get(conf);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    this.cacheConf = new CacheConfig(conf);<a name="line.85"></a>
+<span class="sourceLineNo">085</span>    this.cacheConf = CacheConfig.DISABLED;<a name="line.85"></a>
 <span class="sourceLineNo">086</span>    this.executor = executor;<a name="line.86"></a>
 <span class="sourceLineNo">087</span>    this.inQuarantineMode = quarantine;<a name="line.87"></a>
 <span class="sourceLineNo">088</span>  }<a name="line.88"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html
index b13c1bd..dfd5ad8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.html
@@ -90,7 +90,7 @@
 <span class="sourceLineNo">082</span>      boolean quarantine) throws IOException {<a name="line.82"></a>
 <span class="sourceLineNo">083</span>    this.conf = conf;<a name="line.83"></a>
 <span class="sourceLineNo">084</span>    this.fs = FileSystem.get(conf);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    this.cacheConf = new CacheConfig(conf);<a name="line.85"></a>
+<span class="sourceLineNo">085</span>    this.cacheConf = CacheConfig.DISABLED;<a name="line.85"></a>
 <span class="sourceLineNo">086</span>    this.executor = executor;<a name="line.86"></a>
 <span class="sourceLineNo">087</span>    this.inQuarantineMode = quarantine;<a name="line.87"></a>
 <span class="sourceLineNo">088</span>  }<a name="line.88"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/downloads.html
----------------------------------------------------------------------
diff --git a/downloads.html b/downloads.html
index d32c17d..0bffa73 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase Downloads</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -463,7 +463,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/export_control.html
----------------------------------------------------------------------
diff --git a/export_control.html b/export_control.html
index 1d21cce..d81a697 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Export Control
@@ -341,7 +341,7 @@ for more details.</p>
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/index.html
----------------------------------------------------------------------
diff --git a/index.html b/index.html
index e6ef909..a8fb82b 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase™ Home</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -421,7 +421,7 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/integration.html
----------------------------------------------------------------------
diff --git a/integration.html b/integration.html
index 91e9b6d..f8d0280 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; CI Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -301,7 +301,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/issue-tracking.html
----------------------------------------------------------------------
diff --git a/issue-tracking.html b/issue-tracking.html
index 1c54f42..1147539 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Issue Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -298,7 +298,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/license.html
----------------------------------------------------------------------
diff --git a/license.html b/license.html
index e7d0d4d..b1c7651 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Licenses</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -501,7 +501,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/mail-lists.html
----------------------------------------------------------------------
diff --git a/mail-lists.html b/mail-lists.html
index 3c268a6..122f57a 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Mailing Lists</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -351,7 +351,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/metrics.html
----------------------------------------------------------------------
diff --git a/metrics.html b/metrics.html
index 65ffe80..0a20795 100644
--- a/metrics.html
+++ b/metrics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) Metrics
@@ -469,7 +469,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/old_news.html
----------------------------------------------------------------------
diff --git a/old_news.html b/old_news.html
index 7dca8c6..18910b1 100644
--- a/old_news.html
+++ b/old_news.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Old Apache HBase (TM) News
@@ -450,7 +450,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/plugin-management.html
----------------------------------------------------------------------
diff --git a/plugin-management.html b/plugin-management.html
index 1e5d9ee..6902e80 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugin Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -450,7 +450,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/plugins.html
----------------------------------------------------------------------
diff --git a/plugins.html b/plugins.html
index bf87294..3fd8e16 100644
--- a/plugins.html
+++ b/plugins.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugins</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -381,7 +381,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/poweredbyhbase.html
----------------------------------------------------------------------
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index d71e40c..31eba53 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Powered By Apache HBase™</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -779,7 +779,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/project-info.html
----------------------------------------------------------------------
diff --git a/project-info.html b/project-info.html
index 6c4d283..153e6b2 100644
--- a/project-info.html
+++ b/project-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -345,7 +345,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/project-reports.html
----------------------------------------------------------------------
diff --git a/project-reports.html b/project-reports.html
index 736c2e4..2b85605 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Generated Reports</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -315,7 +315,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/project-summary.html
----------------------------------------------------------------------
diff --git a/project-summary.html b/project-summary.html
index e48233e..fd72958 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Summary</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -341,7 +341,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/pseudo-distributed.html
----------------------------------------------------------------------
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index fa5c3b2..c5caf3c 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
 Running Apache HBase (TM) in pseudo-distributed mode
@@ -318,7 +318,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/replication.html
----------------------------------------------------------------------
diff --git a/replication.html b/replication.html
index cac3935..446c96a 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Apache HBase (TM) Replication
@@ -313,7 +313,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/resources.html
----------------------------------------------------------------------
diff --git a/resources.html b/resources.html
index 7ff1b2b..b70f256 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Other Apache HBase (TM) Resources</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -341,7 +341,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/source-repository.html
----------------------------------------------------------------------
diff --git a/source-repository.html b/source-repository.html
index 800cae0..2eeb306 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Source Code Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -309,7 +309,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/sponsors.html
----------------------------------------------------------------------
diff --git a/sponsors.html b/sponsors.html
index 34b5766..faea188 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase™ Sponsors</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -343,7 +343,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/supportingprojects.html
----------------------------------------------------------------------
diff --git a/supportingprojects.html b/supportingprojects.html
index 60da81f..0821c53 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Supporting Projects</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -530,7 +530,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/team-list.html
----------------------------------------------------------------------
diff --git a/team-list.html b/team-list.html
index d7b1968..cdcd426 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Team</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -776,7 +776,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 


[04/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I

<TRUNCATED>

[24/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
index 5ee521a..68dec5a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
@@ -422,572 +422,570 @@
 <span class="sourceLineNo">414</span>        DataBlockEncoding encoding = overriddenEncoding;<a name="line.414"></a>
 <span class="sourceLineNo">415</span>        encoding = encoding == null ? datablockEncodingMap.get(tableAndFamily) : encoding;<a name="line.415"></a>
 <span class="sourceLineNo">416</span>        encoding = encoding == null ? DataBlockEncoding.NONE : encoding;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>        Configuration tempConf = new Configuration(conf);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.419"></a>
-<span class="sourceLineNo">420</span>                                    .withCompression(compression)<a name="line.420"></a>
-<span class="sourceLineNo">421</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.421"></a>
-<span class="sourceLineNo">422</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>                                    .withBlockSize(blockSize);<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          contextBuilder.withIncludesTags(true);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        if (null == favoredNodes) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          wl.writer =<a name="line.432"></a>
-<span class="sourceLineNo">433</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), fs)<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        } else {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>          wl.writer =<a name="line.437"></a>
-<span class="sourceLineNo">438</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))<a name="line.438"></a>
-<span class="sourceLineNo">439</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.439"></a>
-<span class="sourceLineNo">440</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.440"></a>
-<span class="sourceLineNo">441</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>        this.writers.put(tableAndFamily, wl);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        return wl;<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>        if (w != null) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.450"></a>
-<span class="sourceLineNo">451</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.452"></a>
-<span class="sourceLineNo">453</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>              Bytes.toBytes(true));<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>              Bytes.toBytes(compactionExclude));<a name="line.457"></a>
-<span class="sourceLineNo">458</span>          w.appendTrackedTimestampsToMetadata();<a name="line.458"></a>
-<span class="sourceLineNo">459</span>          w.close();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        }<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>      @Override<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      public void close(TaskAttemptContext c)<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      throws IOException, InterruptedException {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        for (WriterLength wl: this.writers.values()) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          close(wl.writer);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    };<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
-<span class="sourceLineNo">474</span>   * Configure block storage policy for CF after the directory is created.<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   */<a name="line.475"></a>
-<span class="sourceLineNo">476</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      return;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    String policy =<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.483"></a>
-<span class="sourceLineNo">484</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
-<span class="sourceLineNo">487</span><a name="line.487"></a>
-<span class="sourceLineNo">488</span>  /*<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   */<a name="line.490"></a>
-<span class="sourceLineNo">491</span>  static class WriterLength {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    long written = 0;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    StoreFileWriter writer = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>  }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * Return the start keys of all of the regions in this table,<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * as a list of ImmutableBytesWritable.<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   */<a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.500"></a>
-<span class="sourceLineNo">501</span>                                                                 boolean writeMultipleTables)<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          throws IOException {<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      TableName tableName = regionLocator.getName();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      for (byte[] byteKey : byteKeys) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        if (writeMultipleTables)<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        {<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          //MultiTableHFileOutputFormat use case<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        if (LOG.isDebugEnabled()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.518"></a>
-<span class="sourceLineNo">519</span>                  (fullKey) + "]");<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    }<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return ret;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("deprecation")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    if (startKeys.isEmpty()) {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>      throw new IllegalArgumentException("No regions passed");<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    }<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // We're generating a list of split points, and we don't ever<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    // empty reducer with index 0<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    ImmutableBytesWritable first = sorted.first();<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    if (writeMultipleTables) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              ().get()));<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      throw new IllegalArgumentException(<a name="line.550"></a>
-<span class="sourceLineNo">551</span>          "First region of table should have empty start key. Instead has: "<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          + Bytes.toStringBinary(first.get()));<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    sorted.remove(sorted.first());<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    // Write the actual file<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      NullWritable.class);<a name="line.560"></a>
-<span class="sourceLineNo">561</span><a name="line.561"></a>
-<span class="sourceLineNo">562</span>    try {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.563"></a>
-<span class="sourceLineNo">564</span>        writer.append(startKey, NullWritable.get());<a name="line.564"></a>
-<span class="sourceLineNo">565</span>      }<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    } finally {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      writer.close();<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  /**<a name="line.571"></a>
-<span class="sourceLineNo">572</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.572"></a>
-<span class="sourceLineNo">573</span>   * table. This<a name="line.573"></a>
-<span class="sourceLineNo">574</span>   * &lt;ul&gt;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   * &lt;/ul&gt;<a name="line.581"></a>
-<span class="sourceLineNo">582</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   * running this function.<a name="line.583"></a>
-<span class="sourceLineNo">584</span>   */<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      throws IOException {<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>  /**<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * table. This<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   * &lt;ul&gt;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.595"></a>
-<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.596"></a>
-<span class="sourceLineNo">597</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * &lt;/ul&gt;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * running this function.<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   */<a name="line.603"></a>
-<span class="sourceLineNo">604</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      RegionLocator regionLocator) throws IOException {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.608"></a>
-<span class="sourceLineNo">609</span>  }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    Configuration conf = job.getConfiguration();<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    job.setOutputFormatClass(cls);<a name="line.616"></a>
-<span class="sourceLineNo">617</span><a name="line.617"></a>
-<span class="sourceLineNo">618</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    }<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    boolean writeMultipleTables = false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      writeMultipleTables = true;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    // sort the incoming values.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>      job.setReducerClass(CellSortReducer.class);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      job.setReducerClass(PutSortReducer.class);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      job.setReducerClass(TextSortReducer.class);<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.637"></a>
-<span class="sourceLineNo">638</span>    }<a name="line.638"></a>
-<span class="sourceLineNo">639</span><a name="line.639"></a>
-<span class="sourceLineNo">640</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        CellSerialization.class.getName());<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    }<a name="line.646"></a>
-<span class="sourceLineNo">647</span><a name="line.647"></a>
-<span class="sourceLineNo">648</span>    /* Now get the region start keys for every table required */<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.651"></a>
-<span class="sourceLineNo">652</span><a name="line.652"></a>
-<span class="sourceLineNo">653</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      String tn = writeMultipleTables?<a name="line.656"></a>
-<span class="sourceLineNo">657</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      allTableNames.add(tn);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.663"></a>
-<span class="sourceLineNo">664</span>            .toString(tableSeparator)));<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // Use table's region boundaries for TOP split points.<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>        "to match current region count for all tables");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    job.setNumReduceTasks(startKeys.size());<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    // Set compression algorithms based on column families<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.674"></a>
+<span class="sourceLineNo">417</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.417"></a>
+<span class="sourceLineNo">418</span>                                    .withCompression(compression)<a name="line.418"></a>
+<span class="sourceLineNo">419</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.419"></a>
+<span class="sourceLineNo">420</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.420"></a>
+<span class="sourceLineNo">421</span>                                    .withBlockSize(blockSize);<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>          contextBuilder.withIncludesTags(true);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>        }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>        if (null == favoredNodes) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>          wl.writer =<a name="line.430"></a>
+<span class="sourceLineNo">431</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, fs)<a name="line.431"></a>
+<span class="sourceLineNo">432</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.432"></a>
+<span class="sourceLineNo">433</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.433"></a>
+<span class="sourceLineNo">434</span>        } else {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>          wl.writer =<a name="line.435"></a>
+<span class="sourceLineNo">436</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, new HFileSystem(fs))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.437"></a>
+<span class="sourceLineNo">438</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.438"></a>
+<span class="sourceLineNo">439</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        }<a name="line.440"></a>
+<span class="sourceLineNo">441</span><a name="line.441"></a>
+<span class="sourceLineNo">442</span>        this.writers.put(tableAndFamily, wl);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        return wl;<a name="line.443"></a>
+<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
+<span class="sourceLineNo">445</span><a name="line.445"></a>
+<span class="sourceLineNo">446</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>        if (w != null) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.448"></a>
+<span class="sourceLineNo">449</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.450"></a>
+<span class="sourceLineNo">451</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.452"></a>
+<span class="sourceLineNo">453</span>              Bytes.toBytes(true));<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.454"></a>
+<span class="sourceLineNo">455</span>              Bytes.toBytes(compactionExclude));<a name="line.455"></a>
+<span class="sourceLineNo">456</span>          w.appendTrackedTimestampsToMetadata();<a name="line.456"></a>
+<span class="sourceLineNo">457</span>          w.close();<a name="line.457"></a>
+<span class="sourceLineNo">458</span>        }<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      }<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>      @Override<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      public void close(TaskAttemptContext c)<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      throws IOException, InterruptedException {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        for (WriterLength wl: this.writers.values()) {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>          close(wl.writer);<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        }<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    };<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * Configure block storage policy for CF after the directory is created.<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.474"></a>
+<span class="sourceLineNo">475</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      return;<a name="line.477"></a>
+<span class="sourceLineNo">478</span>    }<a name="line.478"></a>
+<span class="sourceLineNo">479</span><a name="line.479"></a>
+<span class="sourceLineNo">480</span>    String policy =<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.481"></a>
+<span class="sourceLineNo">482</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /*<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   */<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  static class WriterLength {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    long written = 0;<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    StoreFileWriter writer = null;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
+<span class="sourceLineNo">493</span><a name="line.493"></a>
+<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
+<span class="sourceLineNo">495</span>   * Return the start keys of all of the regions in this table,<a name="line.495"></a>
+<span class="sourceLineNo">496</span>   * as a list of ImmutableBytesWritable.<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   */<a name="line.497"></a>
+<span class="sourceLineNo">498</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.498"></a>
+<span class="sourceLineNo">499</span>                                                                 boolean writeMultipleTables)<a name="line.499"></a>
+<span class="sourceLineNo">500</span>          throws IOException {<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.503"></a>
+<span class="sourceLineNo">504</span>    {<a name="line.504"></a>
+<span class="sourceLineNo">505</span>      TableName tableName = regionLocator.getName();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      for (byte[] byteKey : byteKeys) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        if (writeMultipleTables)<a name="line.510"></a>
+<span class="sourceLineNo">511</span>        {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>          //MultiTableHFileOutputFormat use case<a name="line.512"></a>
+<span class="sourceLineNo">513</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>        if (LOG.isDebugEnabled()) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.516"></a>
+<span class="sourceLineNo">517</span>                  (fullKey) + "]");<a name="line.517"></a>
+<span class="sourceLineNo">518</span>        }<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.519"></a>
+<span class="sourceLineNo">520</span>      }<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    return ret;<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  }<a name="line.523"></a>
+<span class="sourceLineNo">524</span><a name="line.524"></a>
+<span class="sourceLineNo">525</span>  /**<a name="line.525"></a>
+<span class="sourceLineNo">526</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.526"></a>
+<span class="sourceLineNo">527</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.527"></a>
+<span class="sourceLineNo">528</span>   */<a name="line.528"></a>
+<span class="sourceLineNo">529</span>  @SuppressWarnings("deprecation")<a name="line.529"></a>
+<span class="sourceLineNo">530</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.532"></a>
+<span class="sourceLineNo">533</span>    if (startKeys.isEmpty()) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>      throw new IllegalArgumentException("No regions passed");<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
+<span class="sourceLineNo">536</span><a name="line.536"></a>
+<span class="sourceLineNo">537</span>    // We're generating a list of split points, and we don't ever<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.539"></a>
+<span class="sourceLineNo">540</span>    // empty reducer with index 0<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    ImmutableBytesWritable first = sorted.first();<a name="line.542"></a>
+<span class="sourceLineNo">543</span>    if (writeMultipleTables) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.544"></a>
+<span class="sourceLineNo">545</span>              ().get()));<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      throw new IllegalArgumentException(<a name="line.548"></a>
+<span class="sourceLineNo">549</span>          "First region of table should have empty start key. Instead has: "<a name="line.549"></a>
+<span class="sourceLineNo">550</span>          + Bytes.toStringBinary(first.get()));<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    }<a name="line.551"></a>
+<span class="sourceLineNo">552</span>    sorted.remove(sorted.first());<a name="line.552"></a>
+<span class="sourceLineNo">553</span><a name="line.553"></a>
+<span class="sourceLineNo">554</span>    // Write the actual file<a name="line.554"></a>
+<span class="sourceLineNo">555</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.555"></a>
+<span class="sourceLineNo">556</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.557"></a>
+<span class="sourceLineNo">558</span>      NullWritable.class);<a name="line.558"></a>
+<span class="sourceLineNo">559</span><a name="line.559"></a>
+<span class="sourceLineNo">560</span>    try {<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        writer.append(startKey, NullWritable.get());<a name="line.562"></a>
+<span class="sourceLineNo">563</span>      }<a name="line.563"></a>
+<span class="sourceLineNo">564</span>    } finally {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      writer.close();<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    }<a name="line.566"></a>
+<span class="sourceLineNo">567</span>  }<a name="line.567"></a>
+<span class="sourceLineNo">568</span><a name="line.568"></a>
+<span class="sourceLineNo">569</span>  /**<a name="line.569"></a>
+<span class="sourceLineNo">570</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.570"></a>
+<span class="sourceLineNo">571</span>   * table. This<a name="line.571"></a>
+<span class="sourceLineNo">572</span>   * &lt;ul&gt;<a name="line.572"></a>
+<span class="sourceLineNo">573</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.573"></a>
+<span class="sourceLineNo">574</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.574"></a>
+<span class="sourceLineNo">575</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.577"></a>
+<span class="sourceLineNo">578</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.578"></a>
+<span class="sourceLineNo">579</span>   * &lt;/ul&gt;<a name="line.579"></a>
+<span class="sourceLineNo">580</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.580"></a>
+<span class="sourceLineNo">581</span>   * running this function.<a name="line.581"></a>
+<span class="sourceLineNo">582</span>   */<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.583"></a>
+<span class="sourceLineNo">584</span>      throws IOException {<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  }<a name="line.586"></a>
+<span class="sourceLineNo">587</span><a name="line.587"></a>
+<span class="sourceLineNo">588</span>  /**<a name="line.588"></a>
+<span class="sourceLineNo">589</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.589"></a>
+<span class="sourceLineNo">590</span>   * table. This<a name="line.590"></a>
+<span class="sourceLineNo">591</span>   * &lt;ul&gt;<a name="line.591"></a>
+<span class="sourceLineNo">592</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.592"></a>
+<span class="sourceLineNo">593</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.593"></a>
+<span class="sourceLineNo">594</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.594"></a>
+<span class="sourceLineNo">595</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * &lt;/ul&gt;<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * running this function.<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   */<a name="line.601"></a>
+<span class="sourceLineNo">602</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      RegionLocator regionLocator) throws IOException {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>  }<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.610"></a>
+<span class="sourceLineNo">611</span>    Configuration conf = job.getConfiguration();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    job.setOutputFormatClass(cls);<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.617"></a>
+<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
+<span class="sourceLineNo">619</span>    boolean writeMultipleTables = false;<a name="line.619"></a>
+<span class="sourceLineNo">620</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      writeMultipleTables = true;<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    // sort the incoming values.<a name="line.625"></a>
+<span class="sourceLineNo">626</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.626"></a>
+<span class="sourceLineNo">627</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      job.setReducerClass(CellSortReducer.class);<a name="line.629"></a>
+<span class="sourceLineNo">630</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>      job.setReducerClass(PutSortReducer.class);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      job.setReducerClass(TextSortReducer.class);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    } else {<a name="line.634"></a>
+<span class="sourceLineNo">635</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.635"></a>
+<span class="sourceLineNo">636</span>    }<a name="line.636"></a>
+<span class="sourceLineNo">637</span><a name="line.637"></a>
+<span class="sourceLineNo">638</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.639"></a>
+<span class="sourceLineNo">640</span>        CellSerialization.class.getName());<a name="line.640"></a>
+<span class="sourceLineNo">641</span><a name="line.641"></a>
+<span class="sourceLineNo">642</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.643"></a>
+<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
+<span class="sourceLineNo">645</span><a name="line.645"></a>
+<span class="sourceLineNo">646</span>    /* Now get the region start keys for every table required */<a name="line.646"></a>
+<span class="sourceLineNo">647</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.647"></a>
+<span class="sourceLineNo">648</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.648"></a>
+<span class="sourceLineNo">649</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.651"></a>
+<span class="sourceLineNo">652</span>    {<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.653"></a>
+<span class="sourceLineNo">654</span>      String tn = writeMultipleTables?<a name="line.654"></a>
+<span class="sourceLineNo">655</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.655"></a>
+<span class="sourceLineNo">656</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      allTableNames.add(tn);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.660"></a>
+<span class="sourceLineNo">661</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.661"></a>
+<span class="sourceLineNo">662</span>            .toString(tableSeparator)));<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    // Use table's region boundaries for TOP split points.<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.665"></a>
+<span class="sourceLineNo">666</span>        "to match current region count for all tables");<a name="line.666"></a>
+<span class="sourceLineNo">667</span>    job.setNumReduceTasks(startKeys.size());<a name="line.667"></a>
+<span class="sourceLineNo">668</span><a name="line.668"></a>
+<span class="sourceLineNo">669</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.669"></a>
+<span class="sourceLineNo">670</span>    // Set compression algorithms based on column families<a name="line.670"></a>
+<span class="sourceLineNo">671</span><a name="line.671"></a>
+<span class="sourceLineNo">672</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.672"></a>
+<span class="sourceLineNo">673</span>            tableDescriptors));<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.674"></a>
 <span class="sourceLineNo">675</span>            tableDescriptors));<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.676"></a>
+<span class="sourceLineNo">676</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.676"></a>
 <span class="sourceLineNo">677</span>            tableDescriptors));<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>            tableDescriptors));<a name="line.679"></a>
-<span class="sourceLineNo">680</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        tableDescriptors));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.682"></a>
-<span class="sourceLineNo">683</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.683"></a>
-<span class="sourceLineNo">684</span><a name="line.684"></a>
-<span class="sourceLineNo">685</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    TableMapReduceUtil.initCredentials(job);<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.687"></a>
-<span class="sourceLineNo">688</span>  }<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    Configuration conf = job.getConfiguration();<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.699"></a>
-<span class="sourceLineNo">700</span><a name="line.700"></a>
-<span class="sourceLineNo">701</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    // Set compression algorithms based on column families<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY,<a name="line.703"></a>
-<span class="sourceLineNo">704</span>        serializeColumnFamilyAttribute(compressionDetails, singleTableDescriptor));<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY,<a name="line.705"></a>
-<span class="sourceLineNo">706</span>        serializeColumnFamilyAttribute(blockSizeDetails, singleTableDescriptor));<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY,<a name="line.707"></a>
-<span class="sourceLineNo">708</span>        serializeColumnFamilyAttribute(bloomTypeDetails, singleTableDescriptor));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY,<a name="line.709"></a>
-<span class="sourceLineNo">710</span>        serializeColumnFamilyAttribute(bloomParamDetails, singleTableDescriptor));<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.711"></a>
-<span class="sourceLineNo">712</span>        serializeColumnFamilyAttribute(dataBlockEncodingDetails, singleTableDescriptor));<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    TableMapReduceUtil.initCredentials(job);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    LOG.info("Incremental table " + tableDescriptor.getTableName() + " output configured.");<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  /**<a name="line.719"></a>
-<span class="sourceLineNo">720</span>   * Runs inside the task to deserialize column family to compression algorithm<a name="line.720"></a>
-<span class="sourceLineNo">721</span>   * map from the configuration.<a name="line.721"></a>
-<span class="sourceLineNo">722</span>   *<a name="line.722"></a>
-<span class="sourceLineNo">723</span>   * @param conf to read the serialized values from<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * @return a map from column family to the configured compression algorithm<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   */<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  @VisibleForTesting<a name="line.726"></a>
-<span class="sourceLineNo">727</span>  static Map&lt;byte[], Algorithm&gt; createFamilyCompressionMap(Configuration<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      conf) {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        COMPRESSION_FAMILIES_CONF_KEY);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    Map&lt;byte[], Algorithm&gt; compressionMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      Algorithm algorithm = HFileWriterImpl.compressionByName(e.getValue());<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      compressionMap.put(e.getKey(), algorithm);<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return compressionMap;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>   * Runs inside the task to deserialize column family to bloom filter type<a name="line.740"></a>
-<span class="sourceLineNo">741</span>   * map from the configuration.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>   *<a name="line.742"></a>
-<span class="sourceLineNo">743</span>   * @param conf to read the serialized values from<a name="line.743"></a>
-<span class="sourceLineNo">744</span>   * @return a map from column family to the the configured bloom filter type<a name="line.744"></a>
-<span class="sourceLineNo">745</span>   */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>  @VisibleForTesting<a name="line.746"></a>
-<span class="sourceLineNo">747</span>  static Map&lt;byte[], BloomType&gt; createFamilyBloomTypeMap(Configuration conf) {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        BLOOM_TYPE_FAMILIES_CONF_KEY);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>    Map&lt;byte[], BloomType&gt; bloomTypeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      BloomType bloomType = BloomType.valueOf(e.getValue());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      bloomTypeMap.put(e.getKey(), bloomType);<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    }<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    return bloomTypeMap;<a name="line.755"></a>
-<span class="sourceLineNo">756</span>  }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>  /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   * Runs inside the task to deserialize column family to bloom filter param<a name="line.759"></a>
-<span class="sourceLineNo">760</span>   * map from the configuration.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>   *<a name="line.761"></a>
-<span class="sourceLineNo">762</span>   * @param conf to read the serialized values from<a name="line.762"></a>
-<span class="sourceLineNo">763</span>   * @return a map from column family to the the configured bloom filter param<a name="line.763"></a>
-<span class="sourceLineNo">764</span>   */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>  @VisibleForTesting<a name="line.765"></a>
-<span class="sourceLineNo">766</span>  static Map&lt;byte[], String&gt; createFamilyBloomParamMap(Configuration conf) {<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    return createFamilyConfValueMap(conf, BLOOM_PARAM_FAMILIES_CONF_KEY);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>  }<a name="line.768"></a>
-<span class="sourceLineNo">769</span><a name="line.769"></a>
-<span class="sourceLineNo">770</span><a name="line.770"></a>
-<span class="sourceLineNo">771</span>  /**<a name="line.771"></a>
-<span class="sourceLineNo">772</span>   * Runs inside the task to deserialize column family to block size<a name="line.772"></a>
-<span class="sourceLineNo">773</span>   * map from the configuration.<a name="line.773"></a>
-<span class="sourceLineNo">774</span>   *<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * @param conf to read the serialized values from<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * @return a map from column family to the configured block size<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   */<a name="line.777"></a>
-<span class="sourceLineNo">778</span>  @VisibleForTesting<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  static Map&lt;byte[], Integer&gt; createFamilyBlockSizeMap(Configuration conf) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        BLOCK_SIZE_FAMILIES_CONF_KEY);<a name="line.781"></a>
-<span class="sourceLineNo">782</span>    Map&lt;byte[], Integer&gt; blockSizeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      Integer blockSize = Integer.parseInt(e.getValue());<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      blockSizeMap.put(e.getKey(), blockSize);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    return blockSizeMap;<a name="line.787"></a>
-<span class="sourceLineNo">788</span>  }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>  /**<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * Runs inside the task to deserialize column family to data block encoding<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * type map from the configuration.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   *<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   * @param conf to read the serialized values from<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @return a map from column family to HFileDataBlockEncoder for the<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   *         configured data block type for the family<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   */<a name="line.797"></a>
-<span class="sourceLineNo">798</span>  @VisibleForTesting<a name="line.798"></a>
-<span class="sourceLineNo">799</span>  static Map&lt;byte[], DataBlockEncoding&gt; createFamilyDataBlockEncodingMap(<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      Configuration conf) {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        DATABLOCK_ENCODING_FAMILIES_CONF_KEY);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    Map&lt;byte[], DataBlockEncoding&gt; encoderMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.804"></a>
-<span class="sourceLineNo">805</span>      encoderMap.put(e.getKey(), DataBlockEncoding.valueOf((e.getValue())));<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    }<a name="line.806"></a>
-<span class="sourceLineNo">807</span>    return encoderMap;<a name="line.807"></a>
-<span class="sourceLineNo">808</span>  }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span><a name="line.810"></a>
-<span class="sourceLineNo">811</span>  /**<a name="line.811"></a>
-<span class="sourceLineNo">812</span>   * Run inside the task to deserialize column family to given conf value map.<a name="line.812"></a>
-<span class="sourceLineNo">813</span>   *<a name="line.813"></a>
-<span class="sourceLineNo">814</span>   * @param conf to read the serialized values from<a name="line.814"></a>
-<span class="sourceLineNo">815</span>   * @param confName conf key to read from the configuration<a name="line.815"></a>
-<span class="sourceLineNo">816</span>   * @return a map of column family to the given configuration value<a name="line.816"></a>
-<span class="sourceLineNo">817</span>   */<a name="line.817"></a>
-<span class="sourceLineNo">818</span>  private static Map&lt;byte[], String&gt; createFamilyConfValueMap(<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      Configuration conf, String confName) {<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    Map&lt;byte[], String&gt; confValMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    String confVal = conf.get(confName, "");<a name="line.821"></a>
-<span class="sourceLineNo">822</span>    for (String familyConf : confVal.split("&amp;")) {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      String[] familySplit = familyConf.split("=");<a name="line.823"></a>
-<span class="sourceLineNo">824</span>      if (familySplit.length != 2) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>        continue;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      try {<a name="line.827"></a>
-<span class="sourceLineNo">828</span>        confValMap.put(Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8")),<a name="line.828"></a>
-<span class="sourceLineNo">829</span>            URLDecoder.decode(familySplit[1], "UTF-8"));<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      } catch (UnsupportedEncodingException e) {<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        // will not happen with UTF-8 encoding<a name="line.831"></a>
-<span class="sourceLineNo">832</span>        throw new AssertionError(e);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>    }<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    return confValMap;<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * Configure &lt;code&gt;job&lt;/code&gt; with a TotalOrderPartitioner, partitioning against<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * &lt;code&gt;splitPoints&lt;/code&gt;. Cleans up the partitions file after job exists.<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   */<a name="line.841"></a>
-<span class="sourceLineNo">842</span>  static void configurePartitioner(Job job, List&lt;ImmutableBytesWritable&gt; splitPoints, boolean<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          writeMultipleTables)<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      throws IOException {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>    Configuration conf = job.getConfiguration();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    // create the partitions file<a name="line.846"></a>
-<span class="sourceLineNo">847</span>    FileSystem fs = FileSystem.get(conf);<a name="line.847"></a>
-<span class="sourceLineNo">848</span>    String hbaseTmpFsDir =<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,<a name="line.849"></a>
-<span class="sourceLineNo">850</span>            fs.getHomeDirectory() + "/hbase-staging");<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    Path partitionsPath = new Path(hbaseTmpFsDir, "partitions_" + UUID.randomUUID());<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    fs.makeQualified(partitionsPath);<a name="line.852"></a>
-<span class="sourceLineNo">853</span>    writePartitions(conf, partitionsPath, splitPoints, writeMultipleTables);<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    fs.deleteOnExit(partitionsPath);<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    // configure job to use it<a name="line.856"></a>
-<span class="sourceLineNo">857</span>    job.setPartitionerClass(TotalOrderPartitioner.class);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    TotalOrderPartitioner.setPartitionFile(conf, partitionsPath);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")<a name="line.861"></a>
-<span class="sourceLineNo">862</span>  @VisibleForTesting<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static String serializeColumnFamilyAttribute(Function&lt;ColumnFamilyDescriptor, String&gt; fn, List&lt;TableDescriptor&gt; allTables)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws UnsupportedEncodingException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    StringBuilder attributeValue = new StringBuilder();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    int i = 0;<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    for (TableDescriptor tableDescriptor : allTables) {<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      if (tableDescriptor == null) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        // could happen with mock table instance<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        // CODEREVIEW: Can I set an empty string in conf if mock table instance?<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        return "";<a name="line.871"></a>
-<span class="sourceLineNo">872</span>      }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>      for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>        if (i++ &gt; 0) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>          attributeValue.append('&amp;');<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>        attributeValue.append(URLEncoder.encode(<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            Bytes.toString(combineTableNameSuffix(tableDescriptor.getTableName().getName(), familyDescriptor.getName())),<a name="line.878"></a>
-<span class="sourceLineNo">879</span>            "UTF-8"));<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        attributeValue.append('=');<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        attributeValue.append(URLEncoder.encode(fn.apply(familyDescriptor), "UTF-8"));<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>    // Get rid of the last ampersand<a name="line.884"></a>
-<span class="sourceLineNo">885</span>    return attributeValue.toString();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>  }<a name="line.886"></a>
-<span class="sourceLineNo">887</span><a name="line.887"></a>
-<span class="sourceLineNo">888</span>  /**<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   * Serialize column family to compression algorithm map to configuration.<a name="line.889"></a>
-<span class="sourceLineNo">890</span>   * Invoked while configuring the MR job for incremental load.<a name="line.890"></a>
-<span class="sourceLineNo">891</span>   *<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * @param tableDescriptor to read the properties from<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * @param conf to persist serialized values into<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   * @throws IOException<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   *           on failure to read column family descriptors<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   */<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  @VisibleForTesting<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; compressionDetails = familyDescriptor -&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>          familyDescriptor.getCompressionType().getName();<a name="line.899"></a>
-<span class="sourceLineNo">900</span><a name="line.900"></a>
-<span class="sourceLineNo">901</span>  /**<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   * Serialize column family to block size map to configuration. Invoked while<a name="line.902"></a>
-<span class="sourceLineNo">903</span>   * configuring the MR job for incremental load.<a name="line.903"></a>
-<span class="sourceLineNo">904</span>   *<a name="line.904"></a>
-<span class="sourceLineNo">905</span>   * @param tableDescriptor<a name="line.905"></a>
-<span class="sourceLineNo">906</span>   *          to read the properties from<a name="line.906"></a>
-<span class="sourceLineNo">907</span>   * @param conf<a name="line.907"></a>
-<span class="sourceLineNo">908</span>   *          to persist serialized values into<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   *<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @throws IOException<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   *           on failure to read column family descriptors<a name="line.911"></a>
-<span class="sourceLineNo">912</span>   */<a name="line.912"></a>
-<span class="sourceLineNo">913</span>  @VisibleForTesting<a name="line.913"></a>
-<span class="sourceLineNo">914</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; blockSizeDetails = familyDescriptor -&gt; String<a name="line.914"></a>
-<span class="sourceLineNo">915</span>          .valueOf(familyDescriptor.getBlocksize());<a name="line.915"></a>
-<span class="sourceLineNo">916</span><a name="line.916"></a>
-<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   * Serialize column family to bloom type map to configuration. Invoked while<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * configuring the MR job for incremental load.<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   *<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   * @param tableDescriptor<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   *          to read the properties from<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param conf<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   *          to persist serialized values into<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   *<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   * @throws IOException<a name="line.926"></a>
-<span class="sourceLineNo">927</span>   *           on failure to read column family descriptors<a name="line.927"></a>
-<span class="sourceLineNo">928</span>   */<a name="line.928"></a>
-<span class="sourceLineNo">929</span>  @VisibleForTesting<a name="line.929"></a>
-<span class="sourceLineNo">930</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomTypeDetails = familyDescriptor -&gt; {<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    String bloomType = familyDescriptor.getBloomFilterType().toString();<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    if (bloomType == null) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      bloomType = ColumnFamilyDescriptorBuilder.DEFAULT_BLOOMFILTER.name();<a name="line.933"></a>
-<span class="sourceLineNo">934</span>    }<a name="line.934"></a>
-<span class="sourceLineNo">935</span>    return bloomType;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>  };<a name="line.936"></a>
-<span class="sourceLineNo">937</span><a name="line.937"></a>
-<span class="sourceLineNo">938</span>  /**<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   * Serialize column family to bloom param map to configuration. Invoked while<a name="line.939"></a>
-<span class="sourceLineNo">940</span>   * configuring the MR job for incremental load.<a name="line.940"></a>
-<span class="sourceLineNo">941</span>   *<a name="line.941"></a>
-<span class="sourceLineNo">942</span>   * @param tableDescriptor<a name="line.942"></a>
-<span class="sourceLineNo">943</span>   *          to read the properties from<a name="line.943"></a>
-<span class="sourceLineNo">944</span>   * @param conf<a name="line.944"></a>
-<span class="sourceLineNo">945</span>   *          to persist serialized values into<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   *<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * @throws IOException<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   *           on failure to read column family descriptors<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   */<a name="line.949"></a>
-<span class="sourceLineNo">950</span>  @VisibleForTesting<a name="line.950"></a>
-<span class="sourceLineNo">951</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomParamDetails = familyDescriptor -&gt; {<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    BloomType bloomType = familyDescriptor.getBloomFilterType();<a name="line.952"></a>
-<span class="sourceLineNo">953</span>    String bloomParam = "";<a name="line.953"></a>
-<span class="sourceLineNo">954</span>    if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.PREFIX_LENGTH_KEY);<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    } else if (bloomType == BloomType.ROWPREFIX_DELIMITED) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.DELIMITER_KEY);<a name="line.957"></a>
-<span class="sourceLineNo">958</span>    }<a name="line.958"></a>
-<span class="sourceLineNo">959</span>    return bloomParam;<a name="line.959"></a>
-<span class="sourceLineNo">960</span>  };<a name="line.960"></a>
-<span class="sourceLineNo">961</span><a name="line.961"></a>
-<span class="sourceLineNo">962</span>  /**<a name="line.962"></a>
-<span class="sourceLineNo">963</span>   * Serialize column family to data block encoding map to configuration.<a name="line.963"></a>
-<span class="sourceLineNo">964</span>   * Invoked while configuring the MR job for incremental load.<a name="line.964"></a>
-<span class="sourceLineNo">965</span>   *<a name="line.965"></a>
-<span class="sourceLineNo">966</span>   * @param tableDescriptor<a name="line.966"></a>
-<span class="sourceLineNo">967</span>   *          to read the properties from<a name="line.967"></a>
-<span class="sourceLineNo">968</span>   * @param conf<a name="line.968"></a>
-<span class="sourceLineNo">969</span>   *          to persist serialized values into<a name="line.969"></a>
-<span class="sourceLineNo">970</span>   * @throws IOException<a name="line.970"></a>
-<span class="sourceLineNo">971</span>   *           on failure to read column family descriptors<a name="line.971"></a>
-<span class="sourceLineNo">972</span>   */<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  @VisibleForTesting<a name="line.973"></a>
-<span class="sourceLineNo">974</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; dataBlockEncodingDetails = familyDescriptor -&gt; {<a name="line.974"></a>
-<span class="sourceLineNo">975</span>    DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    if (encoding == null) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      encoding = DataBlockEncoding.NONE;<a name="line.977"></a>
-<span class="sourceLineNo">978</span>    }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>    return encoding.toString();<a name="line.979"></a>
-<span class="sourceLineNo">980</span>  };<a name="line.980"></a>
-<span class="sourceLineNo">981</span><a name="line.981"></a>
-<span class="sourceLineNo">982</span>}<a name="line.982"></a>
+<span class="sourceLineNo">678</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.678"></a>
+<span class="sourceLineNo">679</span>        tableDescriptors));<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.680"></a>
+<span class="sourceLineNo">681</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.683"></a>
+<span class="sourceLineNo">684</span>    TableMapReduceUtil.initCredentials(job);<a name="line.684"></a>
+<span class="sourceLineNo">685</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.685"></a>
+<span class="sourceLineNo">686</span>  }<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.688"></a>
+<span class="sourceLineNo">689</span>      IOException {<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    Configuration conf = job.getConfiguration();<a name="line.690"></a>
+<span class="sourceLineNo">691</span><a name="line.691"></a>
+<span class="sourceLineNo">692</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.692"></a>
+<span class="sourceLineNo">693</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.693"></a>
+<span class="sourceLineNo">694</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.694"></a>
+<span class="sourceLineNo">695</span><a name="line.695"></a>
+<span class="sourceLineNo">696</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.696"></a>
+<span class="sourceLineNo">697</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.697"></a>
+<span class="sourceLineNo">698</span><a name="line.698"></a>
+<span class="sourceLineNo">699</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    // Set compression algorithms based on column familie

<TRUNCATED>

[15/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="line.1543"></a>
-<spa

<TRUNCATED>

[33/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 2a3e958..87091b9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  public static final String version = "3.0.0-SNAPSHOT";<a name="line.10"></a>
 <span class="sourceLineNo">011</span>  public static final String revision = "";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = "jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Tue Dec 11 14:44:59 UTC 2018";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Wed Dec 12 14:43:53 UTC 2018";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = "git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum = "f3af750f342fb0851739ef407054f5df";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum = "67847fa5f6c0734bf527ab6a4205389e";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>
 
 


[32/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
index 2559531..06869f5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
@@ -364,627 +364,625 @@
 <span class="sourceLineNo">356</span>   */<a name="line.356"></a>
 <span class="sourceLineNo">357</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.357"></a>
 <span class="sourceLineNo">358</span>       conf) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    Configuration tempConf = new Configuration(conf);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      CacheConfig cacheConf) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    int version = getFormatVersion(conf);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    switch (version) {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    case 2:<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.373"></a>
-<span class="sourceLineNo">374</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        "in hbase-site.xml)");<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    case 3:<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    default:<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          "format version " + version);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    }<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /**<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * An abstraction used by the block index.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public interface CachingBlockReader {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     * Read in a file block.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>     * @param offset offset to read.<a name="line.392"></a>
-<span class="sourceLineNo">393</span>     * @param onDiskBlockSize size of the block<a name="line.393"></a>
-<span class="sourceLineNo">394</span>     * @param cacheBlock<a name="line.394"></a>
-<span class="sourceLineNo">395</span>     * @param pread<a name="line.395"></a>
-<span class="sourceLineNo">396</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.396"></a>
-<span class="sourceLineNo">397</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     *  caching efficiency of encoded data blocks)<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.401"></a>
-<span class="sourceLineNo">402</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.402"></a>
-<span class="sourceLineNo">403</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>     * @return Block wrapped in a ByteBuffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>     * @throws IOException<a name="line.405"></a>
-<span class="sourceLineNo">406</span>     */<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        throws IOException;<a name="line.411"></a>
-<span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>    /**<a name="line.413"></a>
-<span class="sourceLineNo">414</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.414"></a>
-<span class="sourceLineNo">415</span>     * @param block Block to be returned.<a name="line.415"></a>
-<span class="sourceLineNo">416</span>     */<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    void returnBlock(HFileBlock block);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.423"></a>
-<span class="sourceLineNo">424</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.424"></a>
-<span class="sourceLineNo">425</span>     * write.<a name="line.425"></a>
-<span class="sourceLineNo">426</span>     */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    String getName();<a name="line.427"></a>
+<span class="sourceLineNo">359</span>    return HFile.getWriterFactory(conf, CacheConfig.DISABLED);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      CacheConfig cacheConf) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    int version = getFormatVersion(conf);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    switch (version) {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    case 2:<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.371"></a>
+<span class="sourceLineNo">372</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        "in hbase-site.xml)");<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    case 3:<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    default:<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          "format version " + version);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * An abstraction used by the block index.<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  public interface CachingBlockReader {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>     * Read in a file block.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>     * @param offset offset to read.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>     * @param onDiskBlockSize size of the block<a name="line.391"></a>
+<span class="sourceLineNo">392</span>     * @param cacheBlock<a name="line.392"></a>
+<span class="sourceLineNo">393</span>     * @param pread<a name="line.393"></a>
+<span class="sourceLineNo">394</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.394"></a>
+<span class="sourceLineNo">395</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.396"></a>
+<span class="sourceLineNo">397</span>     *  caching efficiency of encoded data blocks)<a name="line.397"></a>
+<span class="sourceLineNo">398</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.398"></a>
+<span class="sourceLineNo">399</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.399"></a>
+<span class="sourceLineNo">400</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.400"></a>
+<span class="sourceLineNo">401</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.401"></a>
+<span class="sourceLineNo">402</span>     * @return Block wrapped in a ByteBuffer.<a name="line.402"></a>
+<span class="sourceLineNo">403</span>     * @throws IOException<a name="line.403"></a>
+<span class="sourceLineNo">404</span>     */<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>        throws IOException;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>    /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>     * @param block Block to be returned.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>     */<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    void returnBlock(HFileBlock block);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.422"></a>
+<span class="sourceLineNo">423</span>     * write.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>     */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    String getName();<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    CellComparator getComparator();<a name="line.427"></a>
 <span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    CellComparator getComparator();<a name="line.429"></a>
+<span class="sourceLineNo">429</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.429"></a>
 <span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.431"></a>
+<span class="sourceLineNo">431</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.431"></a>
 <span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.433"></a>
+<span class="sourceLineNo">433</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.433"></a>
 <span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.435"></a>
+<span class="sourceLineNo">435</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.437"></a>
+<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.437"></a>
 <span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.439"></a>
+<span class="sourceLineNo">439</span>    long length();<a name="line.439"></a>
 <span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>    long length();<a name="line.441"></a>
+<span class="sourceLineNo">441</span>    long getEntries();<a name="line.441"></a>
 <span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    long getEntries();<a name="line.443"></a>
+<span class="sourceLineNo">443</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.443"></a>
 <span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.445"></a>
+<span class="sourceLineNo">445</span>    long indexSize();<a name="line.445"></a>
 <span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>    long indexSize();<a name="line.447"></a>
+<span class="sourceLineNo">447</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.447"></a>
 <span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.449"></a>
+<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.449"></a>
 <span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.451"></a>
+<span class="sourceLineNo">451</span>    FixedFileTrailer getTrailer();<a name="line.451"></a>
 <span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>    FixedFileTrailer getTrailer();<a name="line.453"></a>
+<span class="sourceLineNo">453</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.453"></a>
 <span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.455"></a>
+<span class="sourceLineNo">455</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.455"></a>
 <span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.457"></a>
+<span class="sourceLineNo">457</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.457"></a>
 <span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    /**<a name="line.461"></a>
-<span class="sourceLineNo">462</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.462"></a>
-<span class="sourceLineNo">463</span>     * {@link HFile} version.<a name="line.463"></a>
-<span class="sourceLineNo">464</span>     * Knows nothing about how that metadata is structured.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>     */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    /**<a name="line.468"></a>
-<span class="sourceLineNo">469</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.469"></a>
-<span class="sourceLineNo">470</span>     * {@link HFile}  version.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>     * Knows nothing about how that metadata is structured.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>     */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.473"></a>
+<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
+<span class="sourceLineNo">460</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.460"></a>
+<span class="sourceLineNo">461</span>     * {@link HFile} version.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>     * Knows nothing about how that metadata is structured.<a name="line.462"></a>
+<span class="sourceLineNo">463</span>     */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>    /**<a name="line.466"></a>
+<span class="sourceLineNo">467</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.467"></a>
+<span class="sourceLineNo">468</span>     * {@link HFile}  version.<a name="line.468"></a>
+<span class="sourceLineNo">469</span>     * Knows nothing about how that metadata is structured.<a name="line.469"></a>
+<span class="sourceLineNo">470</span>     */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>    Path getPath();<a name="line.473"></a>
 <span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>    Path getPath();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    /** Close method with optional evictOnClose */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    void close(boolean evictOnClose) throws IOException;<a name="line.478"></a>
+<span class="sourceLineNo">475</span>    /** Close method with optional evictOnClose */<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    void close(boolean evictOnClose) throws IOException;<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.478"></a>
 <span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.480"></a>
+<span class="sourceLineNo">480</span>    boolean hasMVCCInfo();<a name="line.480"></a>
 <span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    boolean hasMVCCInfo();<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>     * Return the file context of the HFile this reader belongs to<a name="line.485"></a>
-<span class="sourceLineNo">486</span>     */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    HFileContext getFileContext();<a name="line.487"></a>
+<span class="sourceLineNo">482</span>    /**<a name="line.482"></a>
+<span class="sourceLineNo">483</span>     * Return the file context of the HFile this reader belongs to<a name="line.483"></a>
+<span class="sourceLineNo">484</span>     */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    HFileContext getFileContext();<a name="line.485"></a>
+<span class="sourceLineNo">486</span><a name="line.486"></a>
+<span class="sourceLineNo">487</span>    boolean isPrimaryReplicaReader();<a name="line.487"></a>
 <span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    boolean isPrimaryReplicaReader();<a name="line.489"></a>
+<span class="sourceLineNo">489</span>    boolean shouldIncludeMemStoreTS();<a name="line.489"></a>
 <span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>    boolean shouldIncludeMemStoreTS();<a name="line.491"></a>
+<span class="sourceLineNo">491</span>    boolean isDecodeMemStoreTS();<a name="line.491"></a>
 <span class="sourceLineNo">492</span><a name="line.492"></a>
-<span class="sourceLineNo">493</span>    boolean isDecodeMemStoreTS();<a name="line.493"></a>
+<span class="sourceLineNo">493</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.493"></a>
 <span class="sourceLineNo">494</span><a name="line.494"></a>
-<span class="sourceLineNo">495</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>    @VisibleForTesting<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>    @VisibleForTesting<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    boolean prefetchComplete();<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>    /**<a name="line.503"></a>
-<span class="sourceLineNo">504</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.504"></a>
-<span class="sourceLineNo">505</span>     * implementation should take care of thread safety.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>     */<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    void unbufferStream();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Method returns the reader given the specified arguments.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   *<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * @param path hfile's path<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * @param fsdis stream of path's file<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param size max size of the trailer.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param hfs<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @return an appropriate instance of HFileReader<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      justification="Intentional")<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    FixedFileTrailer trailer = null;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    try {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      switch (trailer.getMajorVersion()) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        case 2:<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.535"></a>
-<span class="sourceLineNo">536</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        case 3:<a name="line.537"></a>
-<span class="sourceLineNo">538</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.538"></a>
-<span class="sourceLineNo">539</span>              primaryReplicaReader, conf);<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        default:<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } catch (Throwable t) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      IOUtils.closeQuietly(fsdis);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    } finally {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      fsdis.unbuffer();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>  }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>  /**<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * The sockets and the file descriptors held by the method parameter<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * that no other threads have access to the same passed reference.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param fs A file system<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @param path Path to HFile<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * @param fsdis a stream of path's file<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   * @param size max size of the trailer.<a name="line.558"></a>
-<span class="sourceLineNo">559</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.559"></a>
-<span class="sourceLineNo">560</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.560"></a>
-<span class="sourceLineNo">561</span>   * @param conf Configuration<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @return A version specific Hfile Reader<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      throws IOException {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    HFileSystem hfs = null;<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    // the filesystem.<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (!(fs instanceof HFileSystem)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      hfs = new HFileSystem(fs);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    } else {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      hfs = (HFileSystem) fs;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    }<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
-<span class="sourceLineNo">581</span><a name="line.581"></a>
-<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
-<span class="sourceLineNo">583</span>  * Creates reader with cache configuration disabled<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  * @param fs filesystem<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  * @param path Path to file to read<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  * @return an active Reader instance<a name="line.586"></a>
-<span class="sourceLineNo">587</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  */<a name="line.589"></a>
-<span class="sourceLineNo">590</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      throws IOException {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.592"></a>
-<span class="sourceLineNo">593</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>  }<a name="line.595"></a>
-<span class="sourceLineNo">596</span><a name="line.596"></a>
-<span class="sourceLineNo">597</span>  /**<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   * @param fs filesystem<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   * @param path Path to file to read<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * @param cacheConf This must not be null. @see<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   * @return an active Reader instance<a name="line.603"></a>
-<span class="sourceLineNo">604</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.604"></a>
-<span class="sourceLineNo">605</span>   *           is corrupt/invalid.<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   */<a name="line.606"></a>
-<span class="sourceLineNo">607</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>  }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>  /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * The sockets and the file descriptors held by the method parameter<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.618"></a>
-<span class="sourceLineNo">619</span>   * that no other threads have access to the same passed reference.<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
-<span class="sourceLineNo">622</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.625"></a>
-<span class="sourceLineNo">626</span>  }<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>  /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @param fs filesystem<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * @param path Path to file to verify<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * @throws IOException if failed to read from the underlying stream<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   */<a name="line.634"></a>
-<span class="sourceLineNo">635</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>  }<a name="line.637"></a>
-<span class="sourceLineNo">638</span><a name="line.638"></a>
-<span class="sourceLineNo">639</span>  /**<a name="line.639"></a>
-<span class="sourceLineNo">640</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>   * @param fs filesystem<a name="line.641"></a>
-<span class="sourceLineNo">642</span>   * @param fileStatus the file to verify<a name="line.642"></a>
-<span class="sourceLineNo">643</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.643"></a>
-<span class="sourceLineNo">644</span>   * @throws IOException if failed to read from the underlying stream<a name="line.644"></a>
-<span class="sourceLineNo">645</span>   */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    final Path path = fileStatus.getPath();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    final long size = fileStatus.getLen();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try (FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path)) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      return true;<a name="line.654"></a>
-<span class="sourceLineNo">655</span>    } catch (IllegalArgumentException e) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return false;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
-<span class="sourceLineNo">661</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.661"></a>
-<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public FileInfo() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      super();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.680"></a>
-<span class="sourceLineNo">681</span>     * key prefix.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>     *<a name="line.682"></a>
-<span class="sourceLineNo">683</span>     * @param k key to add<a name="line.683"></a>
-<span class="sourceLineNo">684</span>     * @param v value to add<a name="line.684"></a>
-<span class="sourceLineNo">685</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.685"></a>
-<span class="sourceLineNo">686</span>     *          with the reserved prefix<a name="line.686"></a>
-<span class="sourceLineNo">687</span>     * @return this file info object<a name="line.687"></a>
-<span class="sourceLineNo">688</span>     * @throws IOException if the key or value is invalid<a name="line.688"></a>
-<span class="sourceLineNo">689</span>     */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        final boolean checkPrefix) throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (k == null || v == null) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        throw new NullPointerException("Key nor value may be null");<a name="line.693"></a>
-<span class="sourceLineNo">694</span>      }<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.696"></a>
-<span class="sourceLineNo">697</span>            + " are reserved");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      }<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      put(k, v);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return this;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    public void clear() {<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.map.clear();<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return map.comparator();<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
-<span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public boolean containsKey(Object key) {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      return map.containsKey(key);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public boolean containsValue(Object value) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return map.containsValue(value);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      return map.entrySet();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>    @Override<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    public boolean equals(Object o) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      return map.equals(o);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span>    @Override<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    public byte[] firstKey() {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      return map.firstKey();<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    }<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    @Override<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    public byte[] get(Object key) {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      return map.get(key);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    @Override<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    public int hashCode() {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return map.hashCode();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    }<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>    @Override<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return this.map.headMap(toKey);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public boolean isEmpty() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      return map.isEmpty();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    @Override<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      return map.keySet();<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    }<a name="line.761"></a>
-<span class="sourceLineNo">762</span><a name="line.762"></a>
-<span class="sourceLineNo">763</span>    @Override<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    public byte[] lastKey() {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      return map.lastKey();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    }<a name="line.766"></a>
-<span class="sourceLineNo">767</span><a name="line.767"></a>
-<span class="sourceLineNo">768</span>    @Override<a name="line.768"></a>
-<span class="sourceLineNo">769</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      return this.map.put(key, value);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    }<a name="line.771"></a>
-<span class="sourceLineNo">772</span><a name="line.772"></a>
-<span class="sourceLineNo">773</span>    @Override<a name="line.773"></a>
-<span class="sourceLineNo">774</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      this.map.putAll(m);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public byte[] remove(Object key) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      return this.map.remove(key);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    }<a name="line.781"></a>
-<span class="sourceLineNo">782</span><a name="line.782"></a>
-<span class="sourceLineNo">783</span>    @Override<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    public int size() {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return map.size();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span><a name="line.787"></a>
-<span class="sourceLineNo">788</span>    @Override<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      return this.map.subMap(fromKey, toKey);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
-<span class="sourceLineNo">792</span><a name="line.792"></a>
-<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      return this.map.tailMap(fromKey);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
-<span class="sourceLineNo">797</span><a name="line.797"></a>
-<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    public Collection&lt;byte[]&gt; values() {<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return map.values();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
-<span class="sourceLineNo">802</span><a name="line.802"></a>
-<span class="sourceLineNo">803</span>    /**<a name="line.803"></a>
-<span class="sourceLineNo">804</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.804"></a>
-<span class="sourceLineNo">805</span>     * We write it as a protobuf.<a name="line.805"></a>
-<span class="sourceLineNo">806</span>     * @param out<a name="line.806"></a>
-<span class="sourceLineNo">807</span>     * @throws IOException<a name="line.807"></a>
-<span class="sourceLineNo">808</span>     * @see #read(DataInputStream)<a name="line.808"></a>
-<span class="sourceLineNo">809</span>     */<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    void write(final DataOutputStream out) throws IOException {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey()));<a name="line.814"></a>
-<span class="sourceLineNo">815</span>        bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue()));<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      }<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      builder.build().writeDelimitedTo(out);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>    /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.823"></a>
-<span class="sourceLineNo">824</span>     * Can deserialize protobuf of old Writables format.<a name="line.824"></a>
-<span class="sourceLineNo">825</span>     * @param in<a name="line.825"></a>
-<span class="sourceLineNo">826</span>     * @throws IOException<a name="line.826"></a>
-<span class="sourceLineNo">827</span>     * @see #write(DataOutputStream)<a name="line.827"></a>
-<span class="sourceLineNo">828</span>     */<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    void read(final DataInputStream in) throws IOException {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      byte [] pbuf = new byte[pblen];<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      if (in.markSupported()) in.mark(pblen);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int read = in.read(pbuf);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      } else {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>        if (in.markSupported()) {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>          in.reset();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          parseWritable(in);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        } else {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.843"></a>
-<span class="sourceLineNo">844</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.844"></a>
-<span class="sourceLineNo">845</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.845"></a>
-<span class="sourceLineNo">846</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          parseWritable(new DataInputStream(sis));<a name="line.849"></a>
-<span class="sourceLineNo">850</span>        }<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      }<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * @throws IOException<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      this.map.clear();<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      // Read the number of entries in the map<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      int entries = in.readInt();<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      // Then read each key/value pair<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>        byte [] key = Bytes.readByteArray(in);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.867"></a>
-<span class="sourceLineNo">868</span>        in.readByte();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        byte [] value = Bytes.readByteArray(in);<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        this.map.put(key, value);<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    }<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    /**<a name="line.874"></a>
-<span class="sourceLineNo">875</span>     * Fill our map with content of the pb we read off disk<a name="line.875"></a>
-<span class="sourceLineNo">876</span>     * @param fip protobuf message to read<a name="line.876"></a>
-<span class="sourceLineNo">877</span>     */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      this.map.clear();<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>  }<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.886"></a>
-<span class="sourceLineNo">887</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  /**<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * HFile.Writer.<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   *<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   * @return Array of strings, each represents a supported compression<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.896"></a>
-<span class="sourceLineNo">897</span>   *         supported.<a name="line.897"></a>
-<span class="sourceLineNo">898</span>   *         &lt;ul&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>   *         &lt;li&gt;"none" - No compression.<a name="line.899"></a>
-<span class="sourceLineNo">900</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>   *         &lt;/ul&gt;<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   */<a name="line.902"></a>
-<span class="sourceLineNo">903</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>    return Compression.getSupportedAlgorithms();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>  }<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>  // Utility methods.<a name="line.907"></a>
-<span class="sourceLineNo">908</span>  /*<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   * @param l Long to convert to an int.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   */<a name="line.911"></a>
-<span class="sourceLineNo">912</span>  static int longToInt(final long l) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>  }<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>  /**<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   * empty list.<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   *<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   * @param fs  The file system reference.<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param regionDir  The region directory to scan.<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   * @return The list of files found.<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   * @throws IOException When scanning the files fails.<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   */<a name="line.926"></a>
-<span class="sourceLineNo">927</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      throws IOException {<a name="line.928"></a>
-<span class="sourceLineNo">929</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;&gt;();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    for(FileStatus dir : familyDirs) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      for (FileStatus file : files) {<a name="line.934"></a>
-<span class="sourceLineNo">935</span>        if (!file.isDirectory() &amp;&amp;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.936"></a>
-<span class="sourceLineNo">937</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>          regionHFiles.add(file.getPath());<a name="line.938"></a>
-<span class="sourceLineNo">939</span>        }<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      }<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return regionHFiles;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * indicate that this is not a software error, but corrupted input.<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   *<a name="line.950"></a>
-<span class="sourceLineNo">951</span>   * @param version an HFile version<a name="line.951"></a>
-<span class="sourceLineNo">952</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.952"></a>
-<span class="sourceLineNo">953</span>   */<a name="line.953"></a>
-<span class="sourceLineNo">954</span>  public static void checkFormatVersion(int version)<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      throws IllegalArgumentException {<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.957"></a>
-<span class="sourceLineNo">958</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.958"></a>
-<span class="sourceLineNo">959</span>          + MAX_FORMAT_VERSION + ")");<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    }<a name="line.960"></a>
-<span class="sourceLineNo">961</span>  }<a name="line.961"></a>
-<span class="sourceLineNo">962</span><a name="line.962"></a>
-<span class="sourceLineNo">963</span><a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public static void checkHFileVersion(final Configuration c) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>    int version = c.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    if (version &lt; MAX_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>      throw new IllegalArgumentException("The setting for " + FORMAT_VERSION_KEY +<a name="line.967"></a>
-<span class="sourceLineNo">968</span>        " (in your hbase-*.xml files) is " + version + " which does not match " +<a name="line.968"></a>
-<span class="sourceLineNo">969</span>        MAX_FORMAT_VERSION +<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        "; are you running with a configuration from an older or newer hbase install (an " +<a name="line.970"></a>
-<span class="sourceLineNo">971</span>        "incompatible hbase-default.xml or hbase-site.xml on your CLASSPATH)?");<a name="line.971"></a>
-<span class="sourceLineNo">972</span>    }<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  }<a name="line.973"></a>
-<span class="sourceLineNo">974</span><a name="line.974"></a>
-<span class="sourceLineNo">975</span>  public static void main(String[] args) throws Exception {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    // delegate to preserve old behavior<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    HFilePrettyPrinter.main(args);<a name="line.977"></a>
-<span class="sourceLineNo">978</span>  }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>}<a name="line.979"></a>
+<span class="sourceLineNo">495</span>    @VisibleForTesting<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>    @VisibleForTesting<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    boolean prefetchComplete();<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>    /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>     * implementation should take care of thread safety.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>     */<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    void unbufferStream();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
+<span class="sourceLineNo">507</span><a name="line.507"></a>
+<span class="sourceLineNo">508</span>  /**<a name="line.508"></a>
+<span class="sourceLineNo">509</span>   * Method returns the reader given the specified arguments.<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   *<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param path hfile's path<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param fsdis stream of path's file<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param size max size of the trailer.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   * @param hfs<a name="line.516"></a>
+<span class="sourceLineNo">517</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.517"></a>
+<span class="sourceLineNo">518</span>   * @return an appropriate instance of HFileReader<a name="line.518"></a>
+<span class="sourceLineNo">519</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   */<a name="line.520"></a>
+<span class="sourceLineNo">521</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      justification="Intentional")<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      throws IOException {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    FixedFileTrailer trailer = null;<a name="line.526"></a>
+<span class="sourceLineNo">527</span>    try {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.528"></a>
+<span class="sourceLineNo">529</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      switch (trailer.getMajorVersion()) {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        case 2:<a name="line.532"></a>
+<span class="sourceLineNo">533</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.533"></a>
+<span class="sourceLineNo">534</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        case 3:<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              primaryReplicaReader, conf);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        default:<a name="line.538"></a>
+<span class="sourceLineNo">539</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    } catch (Throwable t) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      IOUtils.closeQuietly(fsdis);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    } finally {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      fsdis.unbuffer();<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
+<span class="sourceLineNo">550</span>   * The sockets and the file descriptors held by the method parameter<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * that no other threads have access to the same passed reference.<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   * @param fs A file system<a name="line.553"></a>
+<span class="sourceLineNo">554</span>   * @param path Path to HFile<a name="line.554"></a>
+<span class="sourceLineNo">555</span>   * @param fsdis a stream of path's file<a name="line.555"></a>
+<span class="sourceLineNo">556</span>   * @param size max size of the trailer.<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param conf Configuration<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return A version specific Hfile Reader<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      throws IOException {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    HFileSystem hfs = null;<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    // the filesystem.<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    if (!(fs instanceof HFileSystem)) {<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      hfs = new HFileSystem(fs);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    } else {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      hfs = (HFileSystem) fs;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.577"></a>
+<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
+<span class="sourceLineNo">579</span><a name="line.579"></a>
+<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
+<span class="sourceLineNo">581</span>  * Creates reader with cache configuration disabled<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  * @param fs filesystem<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  * @param path Path to file to read<a name="line.583"></a>
+<span class="sourceLineNo">584</span>  * @return an active Reader instance<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>  */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>  }<a name="line.593"></a>
+<span class="sourceLineNo">594</span><a name="line.594"></a>
+<span class="sourceLineNo">595</span>  /**<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   * @param fs filesystem<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   * @param path Path to file to read<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * @param cacheConf This must not be null. @see<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * @return an active Reader instance<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   *           is corrupt/invalid.<a name="line.603"></a>
+<span class="sourceLineNo">604</span>   */<a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * The sockets and the file descriptors held by the method parameter<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * that no other threads have access to the same passed reference.<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
+<span class="sourceLineNo">619</span>  @VisibleForTesting<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * @param fs filesystem<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   * @param path Path to file to verify<a name="line.629"></a>
+<span class="sourceLineNo">630</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.630"></a>
+<span class="sourceLineNo">631</span>   * @throws IOException if failed to read from the underlying stream<a name="line.631"></a>
+<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
+<span class="sourceLineNo">633</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  /**<a name="line.637"></a>
+<span class="sourceLineNo">638</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * @param fs filesystem<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * @param fileStatus the file to verify<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @throws IOException if failed to read from the underlying stream<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throws IOExceptio

<TRUNCATED>

[19/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="l

<TRUNCATED>

[05/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    /

<TRUNCATED>

[25/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
index 5ee521a..68dec5a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
@@ -422,572 +422,570 @@
 <span class="sourceLineNo">414</span>        DataBlockEncoding encoding = overriddenEncoding;<a name="line.414"></a>
 <span class="sourceLineNo">415</span>        encoding = encoding == null ? datablockEncodingMap.get(tableAndFamily) : encoding;<a name="line.415"></a>
 <span class="sourceLineNo">416</span>        encoding = encoding == null ? DataBlockEncoding.NONE : encoding;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>        Configuration tempConf = new Configuration(conf);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.419"></a>
-<span class="sourceLineNo">420</span>                                    .withCompression(compression)<a name="line.420"></a>
-<span class="sourceLineNo">421</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.421"></a>
-<span class="sourceLineNo">422</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>                                    .withBlockSize(blockSize);<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          contextBuilder.withIncludesTags(true);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        if (null == favoredNodes) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          wl.writer =<a name="line.432"></a>
-<span class="sourceLineNo">433</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), fs)<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        } else {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>          wl.writer =<a name="line.437"></a>
-<span class="sourceLineNo">438</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))<a name="line.438"></a>
-<span class="sourceLineNo">439</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.439"></a>
-<span class="sourceLineNo">440</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.440"></a>
-<span class="sourceLineNo">441</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>        this.writers.put(tableAndFamily, wl);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        return wl;<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>        if (w != null) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.450"></a>
-<span class="sourceLineNo">451</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.452"></a>
-<span class="sourceLineNo">453</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>              Bytes.toBytes(true));<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>              Bytes.toBytes(compactionExclude));<a name="line.457"></a>
-<span class="sourceLineNo">458</span>          w.appendTrackedTimestampsToMetadata();<a name="line.458"></a>
-<span class="sourceLineNo">459</span>          w.close();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        }<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>      @Override<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      public void close(TaskAttemptContext c)<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      throws IOException, InterruptedException {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        for (WriterLength wl: this.writers.values()) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          close(wl.writer);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    };<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
-<span class="sourceLineNo">474</span>   * Configure block storage policy for CF after the directory is created.<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   */<a name="line.475"></a>
-<span class="sourceLineNo">476</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      return;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    String policy =<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.483"></a>
-<span class="sourceLineNo">484</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
-<span class="sourceLineNo">487</span><a name="line.487"></a>
-<span class="sourceLineNo">488</span>  /*<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   */<a name="line.490"></a>
-<span class="sourceLineNo">491</span>  static class WriterLength {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    long written = 0;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    StoreFileWriter writer = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>  }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * Return the start keys of all of the regions in this table,<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * as a list of ImmutableBytesWritable.<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   */<a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.500"></a>
-<span class="sourceLineNo">501</span>                                                                 boolean writeMultipleTables)<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          throws IOException {<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      TableName tableName = regionLocator.getName();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      for (byte[] byteKey : byteKeys) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        if (writeMultipleTables)<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        {<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          //MultiTableHFileOutputFormat use case<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        if (LOG.isDebugEnabled()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.518"></a>
-<span class="sourceLineNo">519</span>                  (fullKey) + "]");<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    }<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return ret;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("deprecation")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    if (startKeys.isEmpty()) {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>      throw new IllegalArgumentException("No regions passed");<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    }<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // We're generating a list of split points, and we don't ever<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    // empty reducer with index 0<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    ImmutableBytesWritable first = sorted.first();<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    if (writeMultipleTables) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              ().get()));<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      throw new IllegalArgumentException(<a name="line.550"></a>
-<span class="sourceLineNo">551</span>          "First region of table should have empty start key. Instead has: "<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          + Bytes.toStringBinary(first.get()));<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    sorted.remove(sorted.first());<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    // Write the actual file<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      NullWritable.class);<a name="line.560"></a>
-<span class="sourceLineNo">561</span><a name="line.561"></a>
-<span class="sourceLineNo">562</span>    try {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.563"></a>
-<span class="sourceLineNo">564</span>        writer.append(startKey, NullWritable.get());<a name="line.564"></a>
-<span class="sourceLineNo">565</span>      }<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    } finally {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      writer.close();<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  /**<a name="line.571"></a>
-<span class="sourceLineNo">572</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.572"></a>
-<span class="sourceLineNo">573</span>   * table. This<a name="line.573"></a>
-<span class="sourceLineNo">574</span>   * &lt;ul&gt;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   * &lt;/ul&gt;<a name="line.581"></a>
-<span class="sourceLineNo">582</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   * running this function.<a name="line.583"></a>
-<span class="sourceLineNo">584</span>   */<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      throws IOException {<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>  /**<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * table. This<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   * &lt;ul&gt;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.595"></a>
-<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.596"></a>
-<span class="sourceLineNo">597</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * &lt;/ul&gt;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * running this function.<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   */<a name="line.603"></a>
-<span class="sourceLineNo">604</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      RegionLocator regionLocator) throws IOException {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.608"></a>
-<span class="sourceLineNo">609</span>  }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    Configuration conf = job.getConfiguration();<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    job.setOutputFormatClass(cls);<a name="line.616"></a>
-<span class="sourceLineNo">617</span><a name="line.617"></a>
-<span class="sourceLineNo">618</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    }<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    boolean writeMultipleTables = false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      writeMultipleTables = true;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    // sort the incoming values.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>      job.setReducerClass(CellSortReducer.class);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      job.setReducerClass(PutSortReducer.class);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      job.setReducerClass(TextSortReducer.class);<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.637"></a>
-<span class="sourceLineNo">638</span>    }<a name="line.638"></a>
-<span class="sourceLineNo">639</span><a name="line.639"></a>
-<span class="sourceLineNo">640</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        CellSerialization.class.getName());<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    }<a name="line.646"></a>
-<span class="sourceLineNo">647</span><a name="line.647"></a>
-<span class="sourceLineNo">648</span>    /* Now get the region start keys for every table required */<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.651"></a>
-<span class="sourceLineNo">652</span><a name="line.652"></a>
-<span class="sourceLineNo">653</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      String tn = writeMultipleTables?<a name="line.656"></a>
-<span class="sourceLineNo">657</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      allTableNames.add(tn);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.663"></a>
-<span class="sourceLineNo">664</span>            .toString(tableSeparator)));<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // Use table's region boundaries for TOP split points.<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>        "to match current region count for all tables");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    job.setNumReduceTasks(startKeys.size());<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    // Set compression algorithms based on column families<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.674"></a>
+<span class="sourceLineNo">417</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.417"></a>
+<span class="sourceLineNo">418</span>                                    .withCompression(compression)<a name="line.418"></a>
+<span class="sourceLineNo">419</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.419"></a>
+<span class="sourceLineNo">420</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.420"></a>
+<span class="sourceLineNo">421</span>                                    .withBlockSize(blockSize);<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>          contextBuilder.withIncludesTags(true);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>        }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>        if (null == favoredNodes) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>          wl.writer =<a name="line.430"></a>
+<span class="sourceLineNo">431</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, fs)<a name="line.431"></a>
+<span class="sourceLineNo">432</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.432"></a>
+<span class="sourceLineNo">433</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.433"></a>
+<span class="sourceLineNo">434</span>        } else {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>          wl.writer =<a name="line.435"></a>
+<span class="sourceLineNo">436</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, new HFileSystem(fs))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.437"></a>
+<span class="sourceLineNo">438</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.438"></a>
+<span class="sourceLineNo">439</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        }<a name="line.440"></a>
+<span class="sourceLineNo">441</span><a name="line.441"></a>
+<span class="sourceLineNo">442</span>        this.writers.put(tableAndFamily, wl);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        return wl;<a name="line.443"></a>
+<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
+<span class="sourceLineNo">445</span><a name="line.445"></a>
+<span class="sourceLineNo">446</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>        if (w != null) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.448"></a>
+<span class="sourceLineNo">449</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.450"></a>
+<span class="sourceLineNo">451</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.452"></a>
+<span class="sourceLineNo">453</span>              Bytes.toBytes(true));<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.454"></a>
+<span class="sourceLineNo">455</span>              Bytes.toBytes(compactionExclude));<a name="line.455"></a>
+<span class="sourceLineNo">456</span>          w.appendTrackedTimestampsToMetadata();<a name="line.456"></a>
+<span class="sourceLineNo">457</span>          w.close();<a name="line.457"></a>
+<span class="sourceLineNo">458</span>        }<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      }<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>      @Override<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      public void close(TaskAttemptContext c)<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      throws IOException, InterruptedException {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        for (WriterLength wl: this.writers.values()) {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>          close(wl.writer);<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        }<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    };<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * Configure block storage policy for CF after the directory is created.<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.474"></a>
+<span class="sourceLineNo">475</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      return;<a name="line.477"></a>
+<span class="sourceLineNo">478</span>    }<a name="line.478"></a>
+<span class="sourceLineNo">479</span><a name="line.479"></a>
+<span class="sourceLineNo">480</span>    String policy =<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.481"></a>
+<span class="sourceLineNo">482</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /*<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   */<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  static class WriterLength {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    long written = 0;<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    StoreFileWriter writer = null;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
+<span class="sourceLineNo">493</span><a name="line.493"></a>
+<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
+<span class="sourceLineNo">495</span>   * Return the start keys of all of the regions in this table,<a name="line.495"></a>
+<span class="sourceLineNo">496</span>   * as a list of ImmutableBytesWritable.<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   */<a name="line.497"></a>
+<span class="sourceLineNo">498</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.498"></a>
+<span class="sourceLineNo">499</span>                                                                 boolean writeMultipleTables)<a name="line.499"></a>
+<span class="sourceLineNo">500</span>          throws IOException {<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.503"></a>
+<span class="sourceLineNo">504</span>    {<a name="line.504"></a>
+<span class="sourceLineNo">505</span>      TableName tableName = regionLocator.getName();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      for (byte[] byteKey : byteKeys) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        if (writeMultipleTables)<a name="line.510"></a>
+<span class="sourceLineNo">511</span>        {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>          //MultiTableHFileOutputFormat use case<a name="line.512"></a>
+<span class="sourceLineNo">513</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>        if (LOG.isDebugEnabled()) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.516"></a>
+<span class="sourceLineNo">517</span>                  (fullKey) + "]");<a name="line.517"></a>
+<span class="sourceLineNo">518</span>        }<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.519"></a>
+<span class="sourceLineNo">520</span>      }<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    return ret;<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  }<a name="line.523"></a>
+<span class="sourceLineNo">524</span><a name="line.524"></a>
+<span class="sourceLineNo">525</span>  /**<a name="line.525"></a>
+<span class="sourceLineNo">526</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.526"></a>
+<span class="sourceLineNo">527</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.527"></a>
+<span class="sourceLineNo">528</span>   */<a name="line.528"></a>
+<span class="sourceLineNo">529</span>  @SuppressWarnings("deprecation")<a name="line.529"></a>
+<span class="sourceLineNo">530</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.532"></a>
+<span class="sourceLineNo">533</span>    if (startKeys.isEmpty()) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>      throw new IllegalArgumentException("No regions passed");<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
+<span class="sourceLineNo">536</span><a name="line.536"></a>
+<span class="sourceLineNo">537</span>    // We're generating a list of split points, and we don't ever<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.539"></a>
+<span class="sourceLineNo">540</span>    // empty reducer with index 0<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    ImmutableBytesWritable first = sorted.first();<a name="line.542"></a>
+<span class="sourceLineNo">543</span>    if (writeMultipleTables) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.544"></a>
+<span class="sourceLineNo">545</span>              ().get()));<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      throw new IllegalArgumentException(<a name="line.548"></a>
+<span class="sourceLineNo">549</span>          "First region of table should have empty start key. Instead has: "<a name="line.549"></a>
+<span class="sourceLineNo">550</span>          + Bytes.toStringBinary(first.get()));<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    }<a name="line.551"></a>
+<span class="sourceLineNo">552</span>    sorted.remove(sorted.first());<a name="line.552"></a>
+<span class="sourceLineNo">553</span><a name="line.553"></a>
+<span class="sourceLineNo">554</span>    // Write the actual file<a name="line.554"></a>
+<span class="sourceLineNo">555</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.555"></a>
+<span class="sourceLineNo">556</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.557"></a>
+<span class="sourceLineNo">558</span>      NullWritable.class);<a name="line.558"></a>
+<span class="sourceLineNo">559</span><a name="line.559"></a>
+<span class="sourceLineNo">560</span>    try {<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        writer.append(startKey, NullWritable.get());<a name="line.562"></a>
+<span class="sourceLineNo">563</span>      }<a name="line.563"></a>
+<span class="sourceLineNo">564</span>    } finally {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      writer.close();<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    }<a name="line.566"></a>
+<span class="sourceLineNo">567</span>  }<a name="line.567"></a>
+<span class="sourceLineNo">568</span><a name="line.568"></a>
+<span class="sourceLineNo">569</span>  /**<a name="line.569"></a>
+<span class="sourceLineNo">570</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.570"></a>
+<span class="sourceLineNo">571</span>   * table. This<a name="line.571"></a>
+<span class="sourceLineNo">572</span>   * &lt;ul&gt;<a name="line.572"></a>
+<span class="sourceLineNo">573</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.573"></a>
+<span class="sourceLineNo">574</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.574"></a>
+<span class="sourceLineNo">575</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.577"></a>
+<span class="sourceLineNo">578</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.578"></a>
+<span class="sourceLineNo">579</span>   * &lt;/ul&gt;<a name="line.579"></a>
+<span class="sourceLineNo">580</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.580"></a>
+<span class="sourceLineNo">581</span>   * running this function.<a name="line.581"></a>
+<span class="sourceLineNo">582</span>   */<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.583"></a>
+<span class="sourceLineNo">584</span>      throws IOException {<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  }<a name="line.586"></a>
+<span class="sourceLineNo">587</span><a name="line.587"></a>
+<span class="sourceLineNo">588</span>  /**<a name="line.588"></a>
+<span class="sourceLineNo">589</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.589"></a>
+<span class="sourceLineNo">590</span>   * table. This<a name="line.590"></a>
+<span class="sourceLineNo">591</span>   * &lt;ul&gt;<a name="line.591"></a>
+<span class="sourceLineNo">592</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.592"></a>
+<span class="sourceLineNo">593</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.593"></a>
+<span class="sourceLineNo">594</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.594"></a>
+<span class="sourceLineNo">595</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * &lt;/ul&gt;<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * running this function.<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   */<a name="line.601"></a>
+<span class="sourceLineNo">602</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      RegionLocator regionLocator) throws IOException {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>  }<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.610"></a>
+<span class="sourceLineNo">611</span>    Configuration conf = job.getConfiguration();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    job.setOutputFormatClass(cls);<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.617"></a>
+<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
+<span class="sourceLineNo">619</span>    boolean writeMultipleTables = false;<a name="line.619"></a>
+<span class="sourceLineNo">620</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      writeMultipleTables = true;<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    // sort the incoming values.<a name="line.625"></a>
+<span class="sourceLineNo">626</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.626"></a>
+<span class="sourceLineNo">627</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      job.setReducerClass(CellSortReducer.class);<a name="line.629"></a>
+<span class="sourceLineNo">630</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>      job.setReducerClass(PutSortReducer.class);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      job.setReducerClass(TextSortReducer.class);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    } else {<a name="line.634"></a>
+<span class="sourceLineNo">635</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.635"></a>
+<span class="sourceLineNo">636</span>    }<a name="line.636"></a>
+<span class="sourceLineNo">637</span><a name="line.637"></a>
+<span class="sourceLineNo">638</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.639"></a>
+<span class="sourceLineNo">640</span>        CellSerialization.class.getName());<a name="line.640"></a>
+<span class="sourceLineNo">641</span><a name="line.641"></a>
+<span class="sourceLineNo">642</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.643"></a>
+<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
+<span class="sourceLineNo">645</span><a name="line.645"></a>
+<span class="sourceLineNo">646</span>    /* Now get the region start keys for every table required */<a name="line.646"></a>
+<span class="sourceLineNo">647</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.647"></a>
+<span class="sourceLineNo">648</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.648"></a>
+<span class="sourceLineNo">649</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.651"></a>
+<span class="sourceLineNo">652</span>    {<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.653"></a>
+<span class="sourceLineNo">654</span>      String tn = writeMultipleTables?<a name="line.654"></a>
+<span class="sourceLineNo">655</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.655"></a>
+<span class="sourceLineNo">656</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      allTableNames.add(tn);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.660"></a>
+<span class="sourceLineNo">661</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.661"></a>
+<span class="sourceLineNo">662</span>            .toString(tableSeparator)));<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    // Use table's region boundaries for TOP split points.<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.665"></a>
+<span class="sourceLineNo">666</span>        "to match current region count for all tables");<a name="line.666"></a>
+<span class="sourceLineNo">667</span>    job.setNumReduceTasks(startKeys.size());<a name="line.667"></a>
+<span class="sourceLineNo">668</span><a name="line.668"></a>
+<span class="sourceLineNo">669</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.669"></a>
+<span class="sourceLineNo">670</span>    // Set compression algorithms based on column families<a name="line.670"></a>
+<span class="sourceLineNo">671</span><a name="line.671"></a>
+<span class="sourceLineNo">672</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.672"></a>
+<span class="sourceLineNo">673</span>            tableDescriptors));<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.674"></a>
 <span class="sourceLineNo">675</span>            tableDescriptors));<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.676"></a>
+<span class="sourceLineNo">676</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.676"></a>
 <span class="sourceLineNo">677</span>            tableDescriptors));<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>            tableDescriptors));<a name="line.679"></a>
-<span class="sourceLineNo">680</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        tableDescriptors));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.682"></a>
-<span class="sourceLineNo">683</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.683"></a>
-<span class="sourceLineNo">684</span><a name="line.684"></a>
-<span class="sourceLineNo">685</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    TableMapReduceUtil.initCredentials(job);<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.687"></a>
-<span class="sourceLineNo">688</span>  }<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    Configuration conf = job.getConfiguration();<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.699"></a>
-<span class="sourceLineNo">700</span><a name="line.700"></a>
-<span class="sourceLineNo">701</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    // Set compression algorithms based on column families<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY,<a name="line.703"></a>
-<span class="sourceLineNo">704</span>        serializeColumnFamilyAttribute(compressionDetails, singleTableDescriptor));<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY,<a name="line.705"></a>
-<span class="sourceLineNo">706</span>        serializeColumnFamilyAttribute(blockSizeDetails, singleTableDescriptor));<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY,<a name="line.707"></a>
-<span class="sourceLineNo">708</span>        serializeColumnFamilyAttribute(bloomTypeDetails, singleTableDescriptor));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY,<a name="line.709"></a>
-<span class="sourceLineNo">710</span>        serializeColumnFamilyAttribute(bloomParamDetails, singleTableDescriptor));<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.711"></a>
-<span class="sourceLineNo">712</span>        serializeColumnFamilyAttribute(dataBlockEncodingDetails, singleTableDescriptor));<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    TableMapReduceUtil.initCredentials(job);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    LOG.info("Incremental table " + tableDescriptor.getTableName() + " output configured.");<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  /**<a name="line.719"></a>
-<span class="sourceLineNo">720</span>   * Runs inside the task to deserialize column family to compression algorithm<a name="line.720"></a>
-<span class="sourceLineNo">721</span>   * map from the configuration.<a name="line.721"></a>
-<span class="sourceLineNo">722</span>   *<a name="line.722"></a>
-<span class="sourceLineNo">723</span>   * @param conf to read the serialized values from<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * @return a map from column family to the configured compression algorithm<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   */<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  @VisibleForTesting<a name="line.726"></a>
-<span class="sourceLineNo">727</span>  static Map&lt;byte[], Algorithm&gt; createFamilyCompressionMap(Configuration<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      conf) {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        COMPRESSION_FAMILIES_CONF_KEY);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    Map&lt;byte[], Algorithm&gt; compressionMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      Algorithm algorithm = HFileWriterImpl.compressionByName(e.getValue());<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      compressionMap.put(e.getKey(), algorithm);<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return compressionMap;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>   * Runs inside the task to deserialize column family to bloom filter type<a name="line.740"></a>
-<span class="sourceLineNo">741</span>   * map from the configuration.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>   *<a name="line.742"></a>
-<span class="sourceLineNo">743</span>   * @param conf to read the serialized values from<a name="line.743"></a>
-<span class="sourceLineNo">744</span>   * @return a map from column family to the the configured bloom filter type<a name="line.744"></a>
-<span class="sourceLineNo">745</span>   */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>  @VisibleForTesting<a name="line.746"></a>
-<span class="sourceLineNo">747</span>  static Map&lt;byte[], BloomType&gt; createFamilyBloomTypeMap(Configuration conf) {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        BLOOM_TYPE_FAMILIES_CONF_KEY);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>    Map&lt;byte[], BloomType&gt; bloomTypeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      BloomType bloomType = BloomType.valueOf(e.getValue());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      bloomTypeMap.put(e.getKey(), bloomType);<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    }<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    return bloomTypeMap;<a name="line.755"></a>
-<span class="sourceLineNo">756</span>  }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>  /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   * Runs inside the task to deserialize column family to bloom filter param<a name="line.759"></a>
-<span class="sourceLineNo">760</span>   * map from the configuration.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>   *<a name="line.761"></a>
-<span class="sourceLineNo">762</span>   * @param conf to read the serialized values from<a name="line.762"></a>
-<span class="sourceLineNo">763</span>   * @return a map from column family to the the configured bloom filter param<a name="line.763"></a>
-<span class="sourceLineNo">764</span>   */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>  @VisibleForTesting<a name="line.765"></a>
-<span class="sourceLineNo">766</span>  static Map&lt;byte[], String&gt; createFamilyBloomParamMap(Configuration conf) {<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    return createFamilyConfValueMap(conf, BLOOM_PARAM_FAMILIES_CONF_KEY);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>  }<a name="line.768"></a>
-<span class="sourceLineNo">769</span><a name="line.769"></a>
-<span class="sourceLineNo">770</span><a name="line.770"></a>
-<span class="sourceLineNo">771</span>  /**<a name="line.771"></a>
-<span class="sourceLineNo">772</span>   * Runs inside the task to deserialize column family to block size<a name="line.772"></a>
-<span class="sourceLineNo">773</span>   * map from the configuration.<a name="line.773"></a>
-<span class="sourceLineNo">774</span>   *<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * @param conf to read the serialized values from<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * @return a map from column family to the configured block size<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   */<a name="line.777"></a>
-<span class="sourceLineNo">778</span>  @VisibleForTesting<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  static Map&lt;byte[], Integer&gt; createFamilyBlockSizeMap(Configuration conf) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        BLOCK_SIZE_FAMILIES_CONF_KEY);<a name="line.781"></a>
-<span class="sourceLineNo">782</span>    Map&lt;byte[], Integer&gt; blockSizeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      Integer blockSize = Integer.parseInt(e.getValue());<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      blockSizeMap.put(e.getKey(), blockSize);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    return blockSizeMap;<a name="line.787"></a>
-<span class="sourceLineNo">788</span>  }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>  /**<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * Runs inside the task to deserialize column family to data block encoding<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * type map from the configuration.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   *<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   * @param conf to read the serialized values from<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @return a map from column family to HFileDataBlockEncoder for the<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   *         configured data block type for the family<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   */<a name="line.797"></a>
-<span class="sourceLineNo">798</span>  @VisibleForTesting<a name="line.798"></a>
-<span class="sourceLineNo">799</span>  static Map&lt;byte[], DataBlockEncoding&gt; createFamilyDataBlockEncodingMap(<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      Configuration conf) {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        DATABLOCK_ENCODING_FAMILIES_CONF_KEY);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    Map&lt;byte[], DataBlockEncoding&gt; encoderMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.804"></a>
-<span class="sourceLineNo">805</span>      encoderMap.put(e.getKey(), DataBlockEncoding.valueOf((e.getValue())));<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    }<a name="line.806"></a>
-<span class="sourceLineNo">807</span>    return encoderMap;<a name="line.807"></a>
-<span class="sourceLineNo">808</span>  }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span><a name="line.810"></a>
-<span class="sourceLineNo">811</span>  /**<a name="line.811"></a>
-<span class="sourceLineNo">812</span>   * Run inside the task to deserialize column family to given conf value map.<a name="line.812"></a>
-<span class="sourceLineNo">813</span>   *<a name="line.813"></a>
-<span class="sourceLineNo">814</span>   * @param conf to read the serialized values from<a name="line.814"></a>
-<span class="sourceLineNo">815</span>   * @param confName conf key to read from the configuration<a name="line.815"></a>
-<span class="sourceLineNo">816</span>   * @return a map of column family to the given configuration value<a name="line.816"></a>
-<span class="sourceLineNo">817</span>   */<a name="line.817"></a>
-<span class="sourceLineNo">818</span>  private static Map&lt;byte[], String&gt; createFamilyConfValueMap(<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      Configuration conf, String confName) {<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    Map&lt;byte[], String&gt; confValMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    String confVal = conf.get(confName, "");<a name="line.821"></a>
-<span class="sourceLineNo">822</span>    for (String familyConf : confVal.split("&amp;")) {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      String[] familySplit = familyConf.split("=");<a name="line.823"></a>
-<span class="sourceLineNo">824</span>      if (familySplit.length != 2) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>        continue;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      try {<a name="line.827"></a>
-<span class="sourceLineNo">828</span>        confValMap.put(Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8")),<a name="line.828"></a>
-<span class="sourceLineNo">829</span>            URLDecoder.decode(familySplit[1], "UTF-8"));<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      } catch (UnsupportedEncodingException e) {<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        // will not happen with UTF-8 encoding<a name="line.831"></a>
-<span class="sourceLineNo">832</span>        throw new AssertionError(e);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>    }<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    return confValMap;<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * Configure &lt;code&gt;job&lt;/code&gt; with a TotalOrderPartitioner, partitioning against<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * &lt;code&gt;splitPoints&lt;/code&gt;. Cleans up the partitions file after job exists.<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   */<a name="line.841"></a>
-<span class="sourceLineNo">842</span>  static void configurePartitioner(Job job, List&lt;ImmutableBytesWritable&gt; splitPoints, boolean<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          writeMultipleTables)<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      throws IOException {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>    Configuration conf = job.getConfiguration();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    // create the partitions file<a name="line.846"></a>
-<span class="sourceLineNo">847</span>    FileSystem fs = FileSystem.get(conf);<a name="line.847"></a>
-<span class="sourceLineNo">848</span>    String hbaseTmpFsDir =<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,<a name="line.849"></a>
-<span class="sourceLineNo">850</span>            fs.getHomeDirectory() + "/hbase-staging");<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    Path partitionsPath = new Path(hbaseTmpFsDir, "partitions_" + UUID.randomUUID());<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    fs.makeQualified(partitionsPath);<a name="line.852"></a>
-<span class="sourceLineNo">853</span>    writePartitions(conf, partitionsPath, splitPoints, writeMultipleTables);<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    fs.deleteOnExit(partitionsPath);<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    // configure job to use it<a name="line.856"></a>
-<span class="sourceLineNo">857</span>    job.setPartitionerClass(TotalOrderPartitioner.class);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    TotalOrderPartitioner.setPartitionFile(conf, partitionsPath);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")<a name="line.861"></a>
-<span class="sourceLineNo">862</span>  @VisibleForTesting<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static String serializeColumnFamilyAttribute(Function&lt;ColumnFamilyDescriptor, String&gt; fn, List&lt;TableDescriptor&gt; allTables)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws UnsupportedEncodingException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    StringBuilder attributeValue = new StringBuilder();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    int i = 0;<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    for (TableDescriptor tableDescriptor : allTables) {<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      if (tableDescriptor == null) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        // could happen with mock table instance<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        // CODEREVIEW: Can I set an empty string in conf if mock table instance?<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        return "";<a name="line.871"></a>
-<span class="sourceLineNo">872</span>      }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>      for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>        if (i++ &gt; 0) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>          attributeValue.append('&amp;');<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>        attributeValue.append(URLEncoder.encode(<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            Bytes.toString(combineTableNameSuffix(tableDescriptor.getTableName().getName(), familyDescriptor.getName())),<a name="line.878"></a>
-<span class="sourceLineNo">879</span>            "UTF-8"));<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        attributeValue.append('=');<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        attributeValue.append(URLEncoder.encode(fn.apply(familyDescriptor), "UTF-8"));<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>    // Get rid of the last ampersand<a name="line.884"></a>
-<span class="sourceLineNo">885</span>    return attributeValue.toString();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>  }<a name="line.886"></a>
-<span class="sourceLineNo">887</span><a name="line.887"></a>
-<span class="sourceLineNo">888</span>  /**<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   * Serialize column family to compression algorithm map to configuration.<a name="line.889"></a>
-<span class="sourceLineNo">890</span>   * Invoked while configuring the MR job for incremental load.<a name="line.890"></a>
-<span class="sourceLineNo">891</span>   *<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * @param tableDescriptor to read the properties from<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * @param conf to persist serialized values into<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   * @throws IOException<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   *           on failure to read column family descriptors<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   */<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  @VisibleForTesting<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; compressionDetails = familyDescriptor -&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>          familyDescriptor.getCompressionType().getName();<a name="line.899"></a>
-<span class="sourceLineNo">900</span><a name="line.900"></a>
-<span class="sourceLineNo">901</span>  /**<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   * Serialize column family to block size map to configuration. Invoked while<a name="line.902"></a>
-<span class="sourceLineNo">903</span>   * configuring the MR job for incremental load.<a name="line.903"></a>
-<span class="sourceLineNo">904</span>   *<a name="line.904"></a>
-<span class="sourceLineNo">905</span>   * @param tableDescriptor<a name="line.905"></a>
-<span class="sourceLineNo">906</span>   *          to read the properties from<a name="line.906"></a>
-<span class="sourceLineNo">907</span>   * @param conf<a name="line.907"></a>
-<span class="sourceLineNo">908</span>   *          to persist serialized values into<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   *<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @throws IOException<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   *           on failure to read column family descriptors<a name="line.911"></a>
-<span class="sourceLineNo">912</span>   */<a name="line.912"></a>
-<span class="sourceLineNo">913</span>  @VisibleForTesting<a name="line.913"></a>
-<span class="sourceLineNo">914</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; blockSizeDetails = familyDescriptor -&gt; String<a name="line.914"></a>
-<span class="sourceLineNo">915</span>          .valueOf(familyDescriptor.getBlocksize());<a name="line.915"></a>
-<span class="sourceLineNo">916</span><a name="line.916"></a>
-<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   * Serialize column family to bloom type map to configuration. Invoked while<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * configuring the MR job for incremental load.<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   *<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   * @param tableDescriptor<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   *          to read the properties from<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param conf<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   *          to persist serialized values into<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   *<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   * @throws IOException<a name="line.926"></a>
-<span class="sourceLineNo">927</span>   *           on failure to read column family descriptors<a name="line.927"></a>
-<span class="sourceLineNo">928</span>   */<a name="line.928"></a>
-<span class="sourceLineNo">929</span>  @VisibleForTesting<a name="line.929"></a>
-<span class="sourceLineNo">930</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomTypeDetails = familyDescriptor -&gt; {<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    String bloomType = familyDescriptor.getBloomFilterType().toString();<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    if (bloomType == null) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      bloomType = ColumnFamilyDescriptorBuilder.DEFAULT_BLOOMFILTER.name();<a name="line.933"></a>
-<span class="sourceLineNo">934</span>    }<a name="line.934"></a>
-<span class="sourceLineNo">935</span>    return bloomType;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>  };<a name="line.936"></a>
-<span class="sourceLineNo">937</span><a name="line.937"></a>
-<span class="sourceLineNo">938</span>  /**<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   * Serialize column family to bloom param map to configuration. Invoked while<a name="line.939"></a>
-<span class="sourceLineNo">940</span>   * configuring the MR job for incremental load.<a name="line.940"></a>
-<span class="sourceLineNo">941</span>   *<a name="line.941"></a>
-<span class="sourceLineNo">942</span>   * @param tableDescriptor<a name="line.942"></a>
-<span class="sourceLineNo">943</span>   *          to read the properties from<a name="line.943"></a>
-<span class="sourceLineNo">944</span>   * @param conf<a name="line.944"></a>
-<span class="sourceLineNo">945</span>   *          to persist serialized values into<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   *<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * @throws IOException<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   *           on failure to read column family descriptors<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   */<a name="line.949"></a>
-<span class="sourceLineNo">950</span>  @VisibleForTesting<a name="line.950"></a>
-<span class="sourceLineNo">951</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomParamDetails = familyDescriptor -&gt; {<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    BloomType bloomType = familyDescriptor.getBloomFilterType();<a name="line.952"></a>
-<span class="sourceLineNo">953</span>    String bloomParam = "";<a name="line.953"></a>
-<span class="sourceLineNo">954</span>    if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.PREFIX_LENGTH_KEY);<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    } else if (bloomType == BloomType.ROWPREFIX_DELIMITED) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.DELIMITER_KEY);<a name="line.957"></a>
-<span class="sourceLineNo">958</span>    }<a name="line.958"></a>
-<span class="sourceLineNo">959</span>    return bloomParam;<a name="line.959"></a>
-<span class="sourceLineNo">960</span>  };<a name="line.960"></a>
-<span class="sourceLineNo">961</span><a name="line.961"></a>
-<span class="sourceLineNo">962</span>  /**<a name="line.962"></a>
-<span class="sourceLineNo">963</span>   * Serialize column family to data block encoding map to configuration.<a name="line.963"></a>
-<span class="sourceLineNo">964</span>   * Invoked while configuring the MR job for incremental load.<a name="line.964"></a>
-<span class="sourceLineNo">965</span>   *<a name="line.965"></a>
-<span class="sourceLineNo">966</span>   * @param tableDescriptor<a name="line.966"></a>
-<span class="sourceLineNo">967</span>   *          to read the properties from<a name="line.967"></a>
-<span class="sourceLineNo">968</span>   * @param conf<a name="line.968"></a>
-<span class="sourceLineNo">969</span>   *          to persist serialized values into<a name="line.969"></a>
-<span class="sourceLineNo">970</span>   * @throws IOException<a name="line.970"></a>
-<span class="sourceLineNo">971</span>   *           on failure to read column family descriptors<a name="line.971"></a>
-<span class="sourceLineNo">972</span>   */<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  @VisibleForTesting<a name="line.973"></a>
-<span class="sourceLineNo">974</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; dataBlockEncodingDetails = familyDescriptor -&gt; {<a name="line.974"></a>
-<span class="sourceLineNo">975</span>    DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    if (encoding == null) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      encoding = DataBlockEncoding.NONE;<a name="line.977"></a>
-<span class="sourceLineNo">978</span>    }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>    return encoding.toString();<a name="line.979"></a>
-<span class="sourceLineNo">980</span>  };<a name="line.980"></a>
-<span class="sourceLineNo">981</span><a name="line.981"></a>
-<span class="sourceLineNo">982</span>}<a name="line.982"></a>
+<span class="sourceLineNo">678</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.678"></a>
+<span class="sourceLineNo">679</span>        tableDescriptors));<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.680"></a>
+<span class="sourceLineNo">681</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.683"></a>
+<span class="sourceLineNo">684</span>    TableMapReduceUtil.initCredentials(job);<a name="line.684"></a>
+<span class="sourceLineNo">685</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.685"></a>
+<span class="sourceLineNo">686</span>  }<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.688"></a>
+<span class="sourceLineNo">689</span>      IOException {<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    Configuration conf = job.getConfiguration();<a name="line.690"></a>
+<span class="sourceLineNo">691</span><a name="line.691"></a>
+<span class="sourceLineNo">692</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.692"></a>
+<span class="sourceLineNo">693</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.693"></a>
+<span class="sourceLineNo">694</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.694"></a>
+<span class="sourceLineNo">695</span><a name="line.695"></a>
+<span class="sourceLineNo">696</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.696"></a>
+<span class="sourceLineNo">697</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.697"></a>
+<span class="sourceLineNo">698</span><a name="line.698"></a>
+<span class="sourceLineNo">699</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    // Set compression algorithms based on column families<a name="line.

<TRUNCATED>

[08/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>

<TRUNCATED>

[34/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
index 97f206a..d314747 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.html
@@ -2139,7 +2139,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>cmp</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4076">cmp</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4075">cmp</a></pre>
 </li>
 </ul>
 </li>
@@ -2495,7 +2495,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>restoreHdfsIntegrity</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1095">restoreHdfsIntegrity</a>()
+<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1094">restoreHdfsIntegrity</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">This method determines if there are table integrity errors in HDFS.  If
@@ -2517,7 +2517,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>offlineReferenceFileRepair</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1143">offlineReferenceFileRepair</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1142">offlineReferenceFileRepair</a>()
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Scan all the store file names to find any lingering reference files,
@@ -2539,7 +2539,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>offlineHLinkFileRepair</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1197">offlineHLinkFileRepair</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1196">offlineHLinkFileRepair</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Scan all the store file names to find any lingering HFileLink files,
@@ -2558,7 +2558,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelineFile</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1244">sidelineFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1243">sidelineFile</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                              org.apache.hadoop.fs.Path&nbsp;hbaseRoot,
                              org.apache.hadoop.fs.Path&nbsp;path)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2574,7 +2574,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>reportEmptyMetaCells</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1262">reportEmptyMetaCells</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1261">reportEmptyMetaCells</a>()</pre>
 <div class="block">TODO -- need to add tests for this.</div>
 </li>
 </ul>
@@ -2584,7 +2584,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>reportTablesInFlux</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1275">reportTablesInFlux</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1274">reportTablesInFlux</a>()</pre>
 <div class="block">TODO -- need to add tests for this.</div>
 </li>
 </ul>
@@ -2594,7 +2594,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getErrors</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1292">getErrors</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1291">getErrors</a>()</pre>
 </li>
 </ul>
 <a name="loadHdfsRegioninfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">
@@ -2603,7 +2603,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>loadHdfsRegioninfo</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1300">loadHdfsRegioninfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1299">loadHdfsRegioninfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Read the .regioninfo file from the file system.  If there is no
  .regioninfo, add it to the orphan hdfs region list.</div>
@@ -2619,7 +2619,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>loadHdfsRegionInfos</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1337">loadHdfsRegionInfos</a>()
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1336">loadHdfsRegionInfos</a>()
                                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Populate hbi's from regionInfos loaded from file system.</div>
@@ -2636,7 +2636,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getColumnFamilyList</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1423">getColumnFamilyList</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;columns,
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1422">getColumnFamilyList</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;columns,
                                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">To get the column family list according to the column family dirs</div>
@@ -2657,7 +2657,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fabricateTableInfo</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1441">fabricateTableInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/FSTableDescriptors.html" title="class in org.apache.hadoop.hbase.util">FSTableDescriptors</a>&nbsp;fstd,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1440">fabricateTableInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/FSTableDescriptors.html" title="class in org.apache.hadoop.hbase.util">FSTableDescriptors</a>&nbsp;fstd,
                                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;columns)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2677,7 +2677,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixEmptyMetaCells</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1456">fixEmptyMetaCells</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1455">fixEmptyMetaCells</a>()
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta <br></div>
 <dl>
@@ -2692,7 +2692,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>fixOrphanTables</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1476">fixOrphanTables</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1475">fixOrphanTables</a>()
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">To fix orphan table by creating a .tableinfo file under tableDir <br>
  1. if TableInfo is cached, to recover the .tableinfo accordingly <br>
@@ -2712,7 +2712,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>createNewMeta</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1536">createNewMeta</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;walFactoryID)
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1535">createNewMeta</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;walFactoryID)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be
  sure to close it as well as the region when you're finished.</div>
@@ -2733,7 +2733,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>generatePuts</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1558">generatePuts</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1557">generatePuts</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Generate set of puts to add to new meta.  This expects the tables to be
  clean with no overlaps or holes.  If there are any problems it returns null.</div>
@@ -2751,7 +2751,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>suggestFixes</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1599">suggestFixes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1598">suggestFixes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Suggest fixes for each table</div>
 <dl>
@@ -2766,7 +2766,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>rebuildMeta</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1615">rebuildMeta</a>(boolean&nbsp;fix)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1614">rebuildMeta</a>(boolean&nbsp;fix)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Rebuilds meta from information in hdfs/fs.  Depends on configuration settings passed into
@@ -2788,7 +2788,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>removeHBCKMetaRecoveryWALDir</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1683">removeHBCKMetaRecoveryWALDir</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;walFactoryId)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1682">removeHBCKMetaRecoveryWALDir</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;walFactoryId)
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Removes the empty Meta recovery WAL directory.</div>
 <dl>
@@ -2806,7 +2806,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>logParallelMerge</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1699">logParallelMerge</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1698">logParallelMerge</a>()</pre>
 <div class="block">Log an appropriate message about whether or not overlapping merges are computed in parallel.</div>
 </li>
 </ul>
@@ -2816,7 +2816,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkHdfsIntegrity</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1709">checkHdfsIntegrity</a>(boolean&nbsp;fixHoles,
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1708">checkHdfsIntegrity</a>(boolean&nbsp;fixHoles,
                                                                     boolean&nbsp;fixOverlaps)
                                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -2831,7 +2831,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getSidelineDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1729">getSidelineDir</a>()
+<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1728">getSidelineDir</a>()
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2845,7 +2845,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelineRegionDir</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1742">sidelineRegionDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1741">sidelineRegionDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                             <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Sideline a region dir (instead of deleting it)</div>
@@ -2861,7 +2861,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelineRegionDir</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1754">sidelineRegionDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1753">sidelineRegionDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;parentDir,
                                             <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -2883,7 +2883,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelineTable</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1827">sidelineTable</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1826">sidelineTable</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                    org.apache.hadoop.fs.Path&nbsp;hbaseDir,
                    org.apache.hadoop.fs.Path&nbsp;backupHbaseDir)
@@ -2901,7 +2901,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelineOldMeta</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1846">sidelineOldMeta</a>()
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1845">sidelineOldMeta</a>()
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -2917,7 +2917,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>loadTableStates</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1869">loadTableStates</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1868">loadTableStates</a>()
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Load the list of disabled tables in ZK into local set.</div>
 <dl>
@@ -2933,7 +2933,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>isTableDisabled</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1883">isTableDisabled</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1882">isTableDisabled</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</pre>
 <div class="block">Check if the specified region's table is disabled.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -2947,7 +2947,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>loadHdfsRegionDirs</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1893">loadHdfsRegionDirs</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1892">loadHdfsRegionDirs</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Scan HDFS for all regions, recording their information into
@@ -2965,7 +2965,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>recordMetaRegion</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1951">recordMetaRegion</a>()
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1950">recordMetaRegion</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Record the location of the hbase:meta region as found in ZooKeeper.</div>
 <dl>
@@ -2980,7 +2980,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>createZooKeeperWatcher</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1988">createZooKeeperWatcher</a>()
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1987">createZooKeeperWatcher</a>()
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2994,7 +2994,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>processRegionServers</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2009">processRegionServers</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;regionServerList)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2008">processRegionServers</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt;&nbsp;regionServerList)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Contacts each regionserver and fetches metadata about regions.</div>
@@ -3013,7 +3013,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkAndFixConsistency</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2037">checkAndFixConsistency</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2036">checkAndFixConsistency</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                     org.apache.zookeeper.KeeperException,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -3032,7 +3032,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRegionConsistencyConcurrently</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2082">checkRegionConsistencyConcurrently</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.CheckRegionConsistencyWorkItem</a>&gt;&nbsp;workItems)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2081">checkRegionConsistencyConcurrently</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.CheckRegionConsistencyWorkItem</a>&gt;&nbsp;workItems)
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                                 org.apache.zookeeper.KeeperException,
                                                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -3051,7 +3051,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>addSkippedRegion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2136">addSkippedRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2135">addSkippedRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)</pre>
 </li>
 </ul>
 <a name="checkAndFixTableStates--">
@@ -3060,7 +3060,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkAndFixTableStates</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2150">checkAndFixTableStates</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2149">checkAndFixTableStates</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check and fix table states, assumes full info available:
  - tableInfos
@@ -3077,7 +3077,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>preCheckPermission</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2192">preCheckPermission</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2191">preCheckPermission</a>()
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                 <a href="../../../../../org/apache/hadoop/hbase/security/AccessDeniedException.html" title="class in org.apache.hadoop.hbase.security">AccessDeniedException</a></pre>
 <dl>
@@ -3093,7 +3093,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteMetaRegion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2218">deleteMetaRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2217">deleteMetaRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Deletes region from meta table</div>
 <dl>
@@ -3108,7 +3108,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteMetaRegion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2225">deleteMetaRegion</a>(byte[]&nbsp;metaKey)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2224">deleteMetaRegion</a>(byte[]&nbsp;metaKey)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Deletes region from meta table</div>
 <dl>
@@ -3123,7 +3123,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>resetSplitParent</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2234">resetSplitParent</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2233">resetSplitParent</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Reset the split parent region info in meta table</div>
 <dl>
@@ -3138,7 +3138,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>offline</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2260">offline</a>(byte[]&nbsp;regionName)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2259">offline</a>(byte[]&nbsp;regionName)
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This backwards-compatibility wrapper for permanently offlining a region
  that should not be alive.  If the region server does not support the
@@ -3158,7 +3158,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>undeployRegions</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2289">undeployRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2288">undeployRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
@@ -3174,7 +3174,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>undeployRegionsForHbi</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2310">undeployRegionsForHbi</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2309">undeployRegionsForHbi</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
@@ -3190,7 +3190,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>closeRegion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2335">closeRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2334">closeRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Attempts to undeploy a region from a region server based in information in
@@ -3216,7 +3216,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>tryAssignmentRepair</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2383">tryAssignmentRepair</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2382">tryAssignmentRepair</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                  org.apache.zookeeper.KeeperException,
@@ -3235,7 +3235,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRegionConsistency</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2419">checkRegionConsistency</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2418">checkRegionConsistency</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
                                     <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                     org.apache.zookeeper.KeeperException,
@@ -3255,7 +3255,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkIntegrity</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2659">checkIntegrity</a>()
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2658">checkIntegrity</a>()
                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Checks tables integrity. Goes over all regions and scans the tables.
  Collects all the pieces for each table and checks if there are missing,
@@ -3272,7 +3272,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>loadTableInfosForTablesWithNoRegion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2726">loadTableInfosForTablesWithNoRegion</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2725">loadTableInfosForTablesWithNoRegion</a>()
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Loads table info's for tables that may not have been included, since there are no
  regions reported for the table, but table dir is there in hdfs</div>
@@ -3288,7 +3288,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>mergeRegionDirs</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2746">mergeRegionDirs</a>(org.apache.hadoop.fs.Path&nbsp;targetRegionDir,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2745">mergeRegionDirs</a>(org.apache.hadoop.fs.Path&nbsp;targetRegionDir,
                            <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;contained)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Merge hdfs data by moving from contained HbckInfo into targetRegionDir.</div>
@@ -3306,7 +3306,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>dumpOverlapProblems</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3552">dumpOverlapProblems</a>(org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;regions)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3551">dumpOverlapProblems</a>(org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;regions)</pre>
 </li>
 </ul>
 <a name="dumpSidelinedRegions-java.util.Map-">
@@ -3315,7 +3315,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>dumpSidelinedRegions</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3565">dumpSidelinedRegions</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.Path,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;regions)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3564">dumpSidelinedRegions</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.Path,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;regions)</pre>
 </li>
 </ul>
 <a name="getOverlapGroups-org.apache.hadoop.hbase.TableName-">
@@ -3324,7 +3324,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getOverlapGroups</h4>
-<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3577">getOverlapGroups</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table)</pre>
+<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3576">getOverlapGroups</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table)</pre>
 </li>
 </ul>
 <a name="getTables-java.util.concurrent.atomic.AtomicInteger-">
@@ -3333,7 +3333,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getTables</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3592">getTables</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a>&nbsp;numSkipped)</pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3591">getTables</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a>&nbsp;numSkipped)</pre>
 <div class="block">Return a list of user-space table names whose metadata have not been
  modified in the last few milliseconds specified by timelag
  if any of the REGIONINFO_QUALIFIER, SERVER_QUALIFIER, STARTCODE_QUALIFIER,
@@ -3353,7 +3353,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableDescriptors</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3612">getTableDescriptors</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tableNames)</pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3611">getTableDescriptors</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&gt;&nbsp;tableNames)</pre>
 </li>
 </ul>
 <a name="getOrCreateInfo-java.lang.String-">
@@ -3362,7 +3362,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getOrCreateInfo</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3629">getOrCreateInfo</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3628">getOrCreateInfo</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Gets the entry in regionInfo corresponding to the the given encoded
  region name. If the region has not been seen yet, a new entry is added
  and returned.</div>
@@ -3374,7 +3374,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkAndFixReplication</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3638">checkAndFixReplication</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3637">checkAndFixReplication</a>()
                              throws <a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationException.html" title="class in org.apache.hadoop.hbase.replication">ReplicationException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3388,7 +3388,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>checkMetaRegion</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3657">checkMetaRegion</a>()
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3656">checkMetaRegion</a>()
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                         org.apache.zookeeper.KeeperException,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -3410,7 +3410,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>unassignMetaReplica</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3711">unassignMetaReplica</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3710">unassignMetaReplica</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
                                  org.apache.zookeeper.KeeperException</pre>
@@ -3428,7 +3428,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>assignMetaReplica</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3717">assignMetaReplica</a>(int&nbsp;replicaId)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3716">assignMetaReplica</a>(int&nbsp;replicaId)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                org.apache.zookeeper.KeeperException,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
@@ -3446,7 +3446,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>loadMetaEntries</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3736">loadMetaEntries</a>()
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3735">loadMetaEntries</a>()
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Scan hbase:meta, adding all regions found to the regionInfo map.</div>
 <dl>
@@ -3461,7 +3461,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>printTableSummary</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4127">printTableSummary</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4126">printTableSummary</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt;&nbsp;tablesInfo)</pre>
 <div class="block">Prints summary of all tables found on the system.</div>
 </li>
 </ul>
@@ -3471,7 +3471,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>getErrorReporter</h4>
-<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4162">getErrorReporter</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4161">getErrorReporter</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3485,7 +3485,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setDisplayFullReport</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4561">setDisplayFullReport</a>()</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4560">setDisplayFullReport</a>()</pre>
 <div class="block">Display the full report from fsck. This displays all live and dead region
  servers, and all known regions.</div>
 </li>
@@ -3496,7 +3496,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setForceExclusive</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4568">setForceExclusive</a>()</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4567">setForceExclusive</a>()</pre>
 <div class="block">Set exclusive mode.</div>
 </li>
 </ul>
@@ -3506,7 +3506,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>isExclusive</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4575">isExclusive</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4574">isExclusive</a>()</pre>
 <div class="block">Only one instance of hbck can modify HBase at a time.</div>
 </li>
 </ul>
@@ -3516,7 +3516,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setSummary</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4583">setSummary</a>()</pre>
+<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4582">setSummary</a>()</pre>
 <div class="block">Set summary mode.
  Print only summary of the tables and status (OK or INCONSISTENT)</div>
 </li>
@@ -3527,7 +3527,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setCheckMetaOnly</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4591">setCheckMetaOnly</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4590">setCheckMetaOnly</a>()</pre>
 <div class="block">Set hbase:meta check mode.
  Print only info about hbase:meta table deployment/state</div>
 </li>
@@ -3538,7 +3538,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setRegionBoundariesCheck</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4598">setRegionBoundariesCheck</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4597">setRegionBoundariesCheck</a>()</pre>
 <div class="block">Set region boundaries check mode.</div>
 </li>
 </ul>
@@ -3548,7 +3548,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixReplication</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4605">setFixReplication</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4604">setFixReplication</a>(boolean&nbsp;shouldFix)</pre>
 <div class="block">Set replication fix mode.</div>
 </li>
 </ul>
@@ -3558,7 +3558,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setCleanReplicationBarrier</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4610">setCleanReplicationBarrier</a>(boolean&nbsp;shouldClean)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4609">setCleanReplicationBarrier</a>(boolean&nbsp;shouldClean)</pre>
 </li>
 </ul>
 <a name="setShouldRerun--">
@@ -3567,7 +3567,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setShouldRerun</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4620">setShouldRerun</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4619">setShouldRerun</a>()</pre>
 <div class="block">Check if we should rerun fsck again. This checks if we've tried to
  fix something and we should rerun fsck tool again.
  Display the full report from fsck. This displays all live and dead
@@ -3580,7 +3580,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldRerun</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4624">shouldRerun</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4623">shouldRerun</a>()</pre>
 </li>
 </ul>
 <a name="setFixAssignments-boolean-">
@@ -3589,7 +3589,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixAssignments</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4632">setFixAssignments</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4631">setFixAssignments</a>(boolean&nbsp;shouldFix)</pre>
 <div class="block">Fix inconsistencies found by fsck. This should try to fix errors (if any)
  found by fsck utility.</div>
 </li>
@@ -3600,7 +3600,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixAssignments</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4637">shouldFixAssignments</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4636">shouldFixAssignments</a>()</pre>
 </li>
 </ul>
 <a name="setFixMeta-boolean-">
@@ -3609,7 +3609,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixMeta</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4641">setFixMeta</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4640">setFixMeta</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixMeta--">
@@ -3618,7 +3618,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixMeta</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4646">shouldFixMeta</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4645">shouldFixMeta</a>()</pre>
 </li>
 </ul>
 <a name="setFixEmptyMetaCells-boolean-">
@@ -3627,7 +3627,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixEmptyMetaCells</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4650">setFixEmptyMetaCells</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4649">setFixEmptyMetaCells</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixEmptyMetaCells--">
@@ -3636,7 +3636,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixEmptyMetaCells</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4655">shouldFixEmptyMetaCells</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4654">shouldFixEmptyMetaCells</a>()</pre>
 </li>
 </ul>
 <a name="setCheckHdfs-boolean-">
@@ -3645,7 +3645,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setCheckHdfs</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4659">setCheckHdfs</a>(boolean&nbsp;checking)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4658">setCheckHdfs</a>(boolean&nbsp;checking)</pre>
 </li>
 </ul>
 <a name="shouldCheckHdfs--">
@@ -3654,7 +3654,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCheckHdfs</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4663">shouldCheckHdfs</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4662">shouldCheckHdfs</a>()</pre>
 </li>
 </ul>
 <a name="setFixHdfsHoles-boolean-">
@@ -3663,7 +3663,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixHdfsHoles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4667">setFixHdfsHoles</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4666">setFixHdfsHoles</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixHdfsHoles--">
@@ -3672,7 +3672,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixHdfsHoles</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4672">shouldFixHdfsHoles</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4671">shouldFixHdfsHoles</a>()</pre>
 </li>
 </ul>
 <a name="setFixTableOrphans-boolean-">
@@ -3681,7 +3681,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixTableOrphans</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4676">setFixTableOrphans</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4675">setFixTableOrphans</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixTableOrphans--">
@@ -3690,7 +3690,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixTableOrphans</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4681">shouldFixTableOrphans</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4680">shouldFixTableOrphans</a>()</pre>
 </li>
 </ul>
 <a name="setFixHdfsOverlaps-boolean-">
@@ -3699,7 +3699,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixHdfsOverlaps</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4685">setFixHdfsOverlaps</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4684">setFixHdfsOverlaps</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixHdfsOverlaps--">
@@ -3708,7 +3708,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixHdfsOverlaps</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4690">shouldFixHdfsOverlaps</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4689">shouldFixHdfsOverlaps</a>()</pre>
 </li>
 </ul>
 <a name="setFixHdfsOrphans-boolean-">
@@ -3717,7 +3717,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixHdfsOrphans</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4694">setFixHdfsOrphans</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4693">setFixHdfsOrphans</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixHdfsOrphans--">
@@ -3726,7 +3726,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixHdfsOrphans</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4699">shouldFixHdfsOrphans</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4698">shouldFixHdfsOrphans</a>()</pre>
 </li>
 </ul>
 <a name="setFixVersionFile-boolean-">
@@ -3735,7 +3735,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixVersionFile</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4703">setFixVersionFile</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4702">setFixVersionFile</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixVersionFile--">
@@ -3744,7 +3744,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixVersionFile</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4708">shouldFixVersionFile</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4707">shouldFixVersionFile</a>()</pre>
 </li>
 </ul>
 <a name="setSidelineBigOverlaps-boolean-">
@@ -3753,7 +3753,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setSidelineBigOverlaps</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4712">setSidelineBigOverlaps</a>(boolean&nbsp;sbo)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4711">setSidelineBigOverlaps</a>(boolean&nbsp;sbo)</pre>
 </li>
 </ul>
 <a name="shouldSidelineBigOverlaps--">
@@ -3762,7 +3762,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldSidelineBigOverlaps</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4716">shouldSidelineBigOverlaps</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4715">shouldSidelineBigOverlaps</a>()</pre>
 </li>
 </ul>
 <a name="setFixSplitParents-boolean-">
@@ -3771,7 +3771,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixSplitParents</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4720">setFixSplitParents</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4719">setFixSplitParents</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="setRemoveParents-boolean-">
@@ -3780,7 +3780,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setRemoveParents</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4725">setRemoveParents</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4724">setRemoveParents</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixSplitParents--">
@@ -3789,7 +3789,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixSplitParents</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4730">shouldFixSplitParents</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4729">shouldFixSplitParents</a>()</pre>
 </li>
 </ul>
 <a name="shouldRemoveParents--">
@@ -3798,7 +3798,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldRemoveParents</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4734">shouldRemoveParents</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4733">shouldRemoveParents</a>()</pre>
 </li>
 </ul>
 <a name="setFixReferenceFiles-boolean-">
@@ -3807,7 +3807,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixReferenceFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4738">setFixReferenceFiles</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4737">setFixReferenceFiles</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixReferenceFiles--">
@@ -3816,7 +3816,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixReferenceFiles</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4743">shouldFixReferenceFiles</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4742">shouldFixReferenceFiles</a>()</pre>
 </li>
 </ul>
 <a name="setFixHFileLinks-boolean-">
@@ -3825,7 +3825,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>setFixHFileLinks</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4747">setFixHFileLinks</a>(boolean&nbsp;shouldFix)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4746">setFixHFileLinks</a>(boolean&nbsp;shouldFix)</pre>
 </li>
 </ul>
 <a name="shouldFixHFileLinks--">
@@ -3834,7 +3834,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldFixHFileLinks</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4752">shouldFixHFileLinks</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4751">shouldFixHFileLinks</a>()</pre>
 </li>
 </ul>
 <a name="shouldIgnorePreCheckPermission--">
@@ -3843,7 +3843,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldIgnorePreCheckPermission</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4756">shouldIgnorePreCheckPermission</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4755">shouldIgnorePreCheckPermission</a>()</pre>
 </li>
 </ul>
 <a name="setIgnorePreCheckPermission-boolean-">
@@ -3852,7 +3852,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/

<TRUNCATED>

[06/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.

<TRUNCATED>

[13/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="line.1543">

<TRUNCATED>

[37/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
index 1b532de..11a20a4 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4169">HBaseFsck.ErrorReporter.ERROR_CODE</a>
+<pre>public static enum <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4168">HBaseFsck.ErrorReporter.ERROR_CODE</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;</pre>
 </li>
 </ul>
@@ -315,7 +315,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>UNKNOWN</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">UNKNOWN</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4169">UNKNOWN</a></pre>
 </li>
 </ul>
 <a name="NO_META_REGION">
@@ -324,7 +324,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_META_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NO_META_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4169">NO_META_REGION</a></pre>
 </li>
 </ul>
 <a name="NULL_META_REGION">
@@ -333,7 +333,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NULL_META_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NULL_META_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4169">NULL_META_REGION</a></pre>
 </li>
 </ul>
 <a name="NO_VERSION_FILE">
@@ -342,7 +342,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_VERSION_FILE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NO_VERSION_FILE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4169">NO_VERSION_FILE</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_META_HDFS">
@@ -351,7 +351,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_META_HDFS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NOT_IN_META_HDFS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4169">NOT_IN_META_HDFS</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_META">
@@ -360,7 +360,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_META</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NOT_IN_META</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4169">NOT_IN_META</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_META_OR_DEPLOYED">
@@ -369,7 +369,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_META_OR_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">NOT_IN_META_OR_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NOT_IN_META_OR_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_HDFS_OR_DEPLOYED">
@@ -378,7 +378,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_HDFS_OR_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">NOT_IN_HDFS_OR_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NOT_IN_HDFS_OR_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="NOT_IN_HDFS">
@@ -387,7 +387,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_IN_HDFS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">NOT_IN_HDFS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">NOT_IN_HDFS</a></pre>
 </li>
 </ul>
 <a name="SERVER_DOES_NOT_MATCH_META">
@@ -396,7 +396,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SERVER_DOES_NOT_MATCH_META</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">SERVER_DOES_NOT_MATCH_META</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4170">SERVER_DOES_NOT_MATCH_META</a></pre>
 </li>
 </ul>
 <a name="NOT_DEPLOYED">
@@ -405,7 +405,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NOT_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">NOT_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">NOT_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="MULTI_DEPLOYED">
@@ -414,7 +414,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>MULTI_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">MULTI_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">MULTI_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="SHOULD_NOT_BE_DEPLOYED">
@@ -423,7 +423,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>SHOULD_NOT_BE_DEPLOYED</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">SHOULD_NOT_BE_DEPLOYED</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">SHOULD_NOT_BE_DEPLOYED</a></pre>
 </li>
 </ul>
 <a name="MULTI_META_REGION">
@@ -432,7 +432,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>MULTI_META_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">MULTI_META_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">MULTI_META_REGION</a></pre>
 </li>
 </ul>
 <a name="RS_CONNECT_FAILURE">
@@ -441,7 +441,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>RS_CONNECT_FAILURE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">RS_CONNECT_FAILURE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4171">RS_CONNECT_FAILURE</a></pre>
 </li>
 </ul>
 <a name="FIRST_REGION_STARTKEY_NOT_EMPTY">
@@ -450,7 +450,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>FIRST_REGION_STARTKEY_NOT_EMPTY</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">FIRST_REGION_STARTKEY_NOT_EMPTY</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">FIRST_REGION_STARTKEY_NOT_EMPTY</a></pre>
 </li>
 </ul>
 <a name="LAST_REGION_ENDKEY_NOT_EMPTY">
@@ -459,7 +459,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LAST_REGION_ENDKEY_NOT_EMPTY</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">LAST_REGION_ENDKEY_NOT_EMPTY</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">LAST_REGION_ENDKEY_NOT_EMPTY</a></pre>
 </li>
 </ul>
 <a name="DUPE_STARTKEYS">
@@ -468,7 +468,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>DUPE_STARTKEYS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">DUPE_STARTKEYS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4172">DUPE_STARTKEYS</a></pre>
 </li>
 </ul>
 <a name="HOLE_IN_REGION_CHAIN">
@@ -477,7 +477,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>HOLE_IN_REGION_CHAIN</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">HOLE_IN_REGION_CHAIN</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">HOLE_IN_REGION_CHAIN</a></pre>
 </li>
 </ul>
 <a name="OVERLAP_IN_REGION_CHAIN">
@@ -486,7 +486,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>OVERLAP_IN_REGION_CHAIN</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">OVERLAP_IN_REGION_CHAIN</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">OVERLAP_IN_REGION_CHAIN</a></pre>
 </li>
 </ul>
 <a name="REGION_CYCLE">
@@ -495,7 +495,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_CYCLE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">REGION_CYCLE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">REGION_CYCLE</a></pre>
 </li>
 </ul>
 <a name="DEGENERATE_REGION">
@@ -504,7 +504,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>DEGENERATE_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">DEGENERATE_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4173">DEGENERATE_REGION</a></pre>
 </li>
 </ul>
 <a name="ORPHAN_HDFS_REGION">
@@ -513,7 +513,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>ORPHAN_HDFS_REGION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">ORPHAN_HDFS_REGION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">ORPHAN_HDFS_REGION</a></pre>
 </li>
 </ul>
 <a name="LINGERING_SPLIT_PARENT">
@@ -522,7 +522,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LINGERING_SPLIT_PARENT</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">LINGERING_SPLIT_PARENT</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">LINGERING_SPLIT_PARENT</a></pre>
 </li>
 </ul>
 <a name="NO_TABLEINFO_FILE">
@@ -531,7 +531,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_TABLEINFO_FILE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">NO_TABLEINFO_FILE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">NO_TABLEINFO_FILE</a></pre>
 </li>
 </ul>
 <a name="LINGERING_REFERENCE_HFILE">
@@ -540,7 +540,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LINGERING_REFERENCE_HFILE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">LINGERING_REFERENCE_HFILE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4174">LINGERING_REFERENCE_HFILE</a></pre>
 </li>
 </ul>
 <a name="LINGERING_HFILELINK">
@@ -549,7 +549,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>LINGERING_HFILELINK</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">LINGERING_HFILELINK</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">LINGERING_HFILELINK</a></pre>
 </li>
 </ul>
 <a name="WRONG_USAGE">
@@ -558,7 +558,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>WRONG_USAGE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">WRONG_USAGE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">WRONG_USAGE</a></pre>
 </li>
 </ul>
 <a name="EMPTY_META_CELL">
@@ -567,7 +567,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>EMPTY_META_CELL</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">EMPTY_META_CELL</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">EMPTY_META_CELL</a></pre>
 </li>
 </ul>
 <a name="EXPIRED_TABLE_LOCK">
@@ -576,7 +576,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>EXPIRED_TABLE_LOCK</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">EXPIRED_TABLE_LOCK</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">EXPIRED_TABLE_LOCK</a></pre>
 </li>
 </ul>
 <a name="BOUNDARIES_ERROR">
@@ -585,7 +585,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>BOUNDARIES_ERROR</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">BOUNDARIES_ERROR</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4175">BOUNDARIES_ERROR</a></pre>
 </li>
 </ul>
 <a name="ORPHAN_TABLE_STATE">
@@ -594,7 +594,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>ORPHAN_TABLE_STATE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">ORPHAN_TABLE_STATE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">ORPHAN_TABLE_STATE</a></pre>
 </li>
 </ul>
 <a name="NO_TABLE_STATE">
@@ -603,7 +603,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>NO_TABLE_STATE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">NO_TABLE_STATE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">NO_TABLE_STATE</a></pre>
 </li>
 </ul>
 <a name="UNDELETED_REPLICATION_QUEUE">
@@ -612,7 +612,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>UNDELETED_REPLICATION_QUEUE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">UNDELETED_REPLICATION_QUEUE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">UNDELETED_REPLICATION_QUEUE</a></pre>
 </li>
 </ul>
 <a name="DUPE_ENDKEYS">
@@ -621,7 +621,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>DUPE_ENDKEYS</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">DUPE_ENDKEYS</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4176">DUPE_ENDKEYS</a></pre>
 </li>
 </ul>
 <a name="UNSUPPORTED_OPTION">
@@ -630,7 +630,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>UNSUPPORTED_OPTION</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">UNSUPPORTED_OPTION</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">UNSUPPORTED_OPTION</a></pre>
 </li>
 </ul>
 <a name="INVALID_TABLE">
@@ -639,7 +639,7 @@ the order they are declared.</div>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>INVALID_TABLE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4178">INVALID_TABLE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.4177">INVALID_TABLE</a></pre>
 </li>
 </ul>
 </li>
@@ -656,7 +656,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3893">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3892">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -676,7 +676,7 @@ for (HBaseFsck.ErrorReporter.ERROR_CODE c : HBaseFsck.ErrorReporter.ERROR_CODE.v
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3893">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html#line.3892">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
index 2c08d61..372f94d 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4168">HBaseFsck.ErrorReporter</a></pre>
+<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4167">HBaseFsck.ErrorReporter</a></pre>
 </li>
 </ul>
 </div>
@@ -234,7 +234,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>clear</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4180">clear</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4179">clear</a>()</pre>
 </li>
 </ul>
 <a name="report-java.lang.String-">
@@ -243,7 +243,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>report</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4181">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4180">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
 <a name="reportError-java.lang.String-">
@@ -252,7 +252,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4182">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4181">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
 <a name="reportError-org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE-java.lang.String-">
@@ -261,7 +261,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4183">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4182">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
@@ -271,7 +271,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4184">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4183">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 </li>
@@ -282,7 +282,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4185">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4184">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info)</pre>
@@ -294,7 +294,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4186">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4185">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                  <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info1,
@@ -307,7 +307,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>summarize</h4>
-<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4193">summarize</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4192">summarize</a>()</pre>
 </li>
 </ul>
 <a name="detail-java.lang.String-">
@@ -316,7 +316,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>detail</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4194">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;details)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4193">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;details)</pre>
 </li>
 </ul>
 <a name="getErrorList--">
@@ -325,7 +325,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>getErrorList</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4195">getErrorList</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4194">getErrorList</a>()</pre>
 </li>
 </ul>
 <a name="progress--">
@@ -334,7 +334,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>progress</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4196">progress</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4195">progress</a>()</pre>
 </li>
 </ul>
 <a name="print-java.lang.String-">
@@ -343,7 +343,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>print</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4197">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4196">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 </li>
 </ul>
 <a name="resetErrors--">
@@ -352,7 +352,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>resetErrors</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4198">resetErrors</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4197">resetErrors</a>()</pre>
 </li>
 </ul>
 <a name="tableHasErrors-org.apache.hadoop.hbase.util.HBaseFsck.TableInfo-">
@@ -361,7 +361,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>tableHasErrors</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4199">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#line.4198">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
index 5a17750..26c60f6 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4936">HBaseFsck.HBaseFsckTool</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4935">HBaseFsck.HBaseFsckTool</a>
 extends org.apache.hadoop.conf.Configured
 implements org.apache.hadoop.util.Tool</pre>
 <div class="block">This is a Tool wrapper that gathers -Dxxx=yyy configuration settings from the command line.</div>
@@ -207,7 +207,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HBaseFsckTool</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4937">HBaseFsckTool</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4936">HBaseFsckTool</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 </li>
@@ -224,7 +224,7 @@ implements org.apache.hadoop.util.Tool</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4939">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html#line.4938">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index 8dffb7c..57e02f4 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3907">HBaseFsck.HbckInfo</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3906">HBaseFsck.HbckInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" title="interface in org.apache.hadoop.hbase.util">KeyRange</a></pre>
 <div class="block">Maintain information about a particular region.  It gathers information
@@ -305,7 +305,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>metaEntry</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3908">metaEntry</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3907">metaEntry</a></pre>
 </li>
 </ul>
 <a name="hdfsEntry">
@@ -314,7 +314,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsEntry</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HdfsEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3909">hdfsEntry</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HdfsEntry</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3908">hdfsEntry</a></pre>
 </li>
 </ul>
 <a name="deployedEntries">
@@ -323,7 +323,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedEntries</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.OnlineEntry</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3910">deployedEntries</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.OnlineEntry</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3909">deployedEntries</a></pre>
 </li>
 </ul>
 <a name="deployedOn">
@@ -332,7 +332,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedOn</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3911">deployedOn</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3910">deployedOn</a></pre>
 </li>
 </ul>
 <a name="skipChecks">
@@ -341,7 +341,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>skipChecks</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3912">skipChecks</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3911">skipChecks</a></pre>
 </li>
 </ul>
 <a name="isMerged">
@@ -350,7 +350,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>isMerged</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3913">isMerged</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3912">isMerged</a></pre>
 </li>
 </ul>
 <a name="deployedReplicaId">
@@ -359,7 +359,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedReplicaId</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3914">deployedReplicaId</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3913">deployedReplicaId</a></pre>
 </li>
 </ul>
 <a name="primaryHRIForDeployedReplica">
@@ -368,7 +368,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockListLast">
 <li class="blockList">
 <h4>primaryHRIForDeployedReplica</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3915">primaryHRIForDeployedReplica</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3914">primaryHRIForDeployedReplica</a></pre>
 </li>
 </ul>
 </li>
@@ -385,7 +385,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HbckInfo</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3917">HbckInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a>&nbsp;metaEntry)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3916">HbckInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.MetaEntry</a>&nbsp;metaEntry)</pre>
 </li>
 </ul>
 </li>
@@ -402,7 +402,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getReplicaId</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3921">getReplicaId</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3920">getReplicaId</a>()</pre>
 </li>
 </ul>
 <a name="addServer-org.apache.hadoop.hbase.client.RegionInfo-org.apache.hadoop.hbase.ServerName-">
@@ -411,7 +411,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>addServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3925">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3924">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;hri,
                       <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server)</pre>
 </li>
 </ul>
@@ -421,7 +421,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3938">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3937">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -434,7 +434,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getStartKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3950">getStartKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3949">getStartKey</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html#getStartKey--">getStartKey</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" title="interface in org.apache.hadoop.hbase.util">KeyRange</a></code></dd>
@@ -447,7 +447,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getEndKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3962">getEndKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3961">getEndKey</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html#getEndKey--">getEndKey</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" title="interface in org.apache.hadoop.hbase.util">KeyRange</a></code></dd>
@@ -460,7 +460,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3973">getTableName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3972">getTableName</a>()</pre>
 </li>
 </ul>
 <a name="getRegionNameAsString--">
@@ -469,7 +469,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionNameAsString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3990">getRegionNameAsString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.3989">getRegionNameAsString</a>()</pre>
 </li>
 </ul>
 <a name="getRegionName--">
@@ -478,7 +478,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionName</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4006">getRegionName</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4005">getRegionName</a>()</pre>
 </li>
 </ul>
 <a name="getPrimaryHRIForDeployedReplica--">
@@ -487,7 +487,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getPrimaryHRIForDeployedReplica</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4020">getPrimaryHRIForDeployedReplica</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4019">getPrimaryHRIForDeployedReplica</a>()</pre>
 </li>
 </ul>
 <a name="getHdfsRegionDir--">
@@ -496,7 +496,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getHdfsRegionDir</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4024">getHdfsRegionDir</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4023">getHdfsRegionDir</a>()</pre>
 </li>
 </ul>
 <a name="containsOnlyHdfsEdits--">
@@ -505,7 +505,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>containsOnlyHdfsEdits</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4031">containsOnlyHdfsEdits</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4030">containsOnlyHdfsEdits</a>()</pre>
 </li>
 </ul>
 <a name="isHdfsRegioninfoPresent--">
@@ -514,7 +514,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>isHdfsRegioninfoPresent</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4038">isHdfsRegioninfoPresent</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4037">isHdfsRegioninfoPresent</a>()</pre>
 </li>
 </ul>
 <a name="getModTime--">
@@ -523,7 +523,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getModTime</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4045">getModTime</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4044">getModTime</a>()</pre>
 </li>
 </ul>
 <a name="getHdfsHRI--">
@@ -532,7 +532,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>getHdfsHRI</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4052">getHdfsHRI</a>()</pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4051">getHdfsHRI</a>()</pre>
 </li>
 </ul>
 <a name="setSkipChecks-boolean-">
@@ -541,7 +541,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>setSkipChecks</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4059">setSkipChecks</a>(boolean&nbsp;skipChecks)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4058">setSkipChecks</a>(boolean&nbsp;skipChecks)</pre>
 </li>
 </ul>
 <a name="isSkipChecks--">
@@ -550,7 +550,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>isSkipChecks</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4063">isSkipChecks</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4062">isSkipChecks</a>()</pre>
 </li>
 </ul>
 <a name="setMerged-boolean-">
@@ -559,7 +559,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockList">
 <li class="blockList">
 <h4>setMerged</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4067">setMerged</a>(boolean&nbsp;isMerged)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4066">setMerged</a>(boolean&nbsp;isMerged)</pre>
 </li>
 </ul>
 <a name="isMerged--">
@@ -568,7 +568,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/KeyRange.html" t
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isMerged</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4071">isMerged</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html#line.4070">isMerged</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
index 11c97b8..ff917eb 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html
@@ -107,7 +107,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3882">HBaseFsck.HdfsEntry</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3881">HBaseFsck.HdfsEntry</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Stores the regioninfo entries from HDFS</div>
 </li>
@@ -201,7 +201,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hri</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3883">hri</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3882">hri</a></pre>
 </li>
 </ul>
 <a name="hdfsRegionDir">
@@ -210,7 +210,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsRegionDir</h4>
-<pre>org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3884">hdfsRegionDir</a></pre>
+<pre>org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3883">hdfsRegionDir</a></pre>
 </li>
 </ul>
 <a name="hdfsRegionDirModTime">
@@ -219,7 +219,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsRegionDirModTime</h4>
-<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3885">hdfsRegionDirModTime</a></pre>
+<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3884">hdfsRegionDirModTime</a></pre>
 </li>
 </ul>
 <a name="hdfsRegioninfoFilePresent">
@@ -228,7 +228,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsRegioninfoFilePresent</h4>
-<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3886">hdfsRegioninfoFilePresent</a></pre>
+<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3885">hdfsRegioninfoFilePresent</a></pre>
 </li>
 </ul>
 <a name="hdfsOnlyEdits">
@@ -237,7 +237,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hdfsOnlyEdits</h4>
-<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3887">hdfsOnlyEdits</a></pre>
+<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3886">hdfsOnlyEdits</a></pre>
 </li>
 </ul>
 </li>
@@ -254,7 +254,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HdfsEntry</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3882">HdfsEntry</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.HdfsEntry.html#line.3881">HdfsEntry</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
index 8d6a0bc..3ce7249 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3831">HBaseFsck.MetaEntry</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3830">HBaseFsck.MetaEntry</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a></pre>
 <div class="block">Stores the regioninfo entries scanned from META</div>
 </li>
@@ -264,7 +264,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>regionServer</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3832">regionServer</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3831">regionServer</a></pre>
 </li>
 </ul>
 <a name="modTime">
@@ -273,7 +273,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>modTime</h4>
-<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3833">modTime</a></pre>
+<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3832">modTime</a></pre>
 </li>
 </ul>
 <a name="splitA">
@@ -282,7 +282,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>splitA</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3834">splitA</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3833">splitA</a></pre>
 </li>
 </ul>
 <a name="splitB">
@@ -291,7 +291,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockListLast">
 <li class="blockList">
 <h4>splitB</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3834">splitB</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3833">splitB</a></pre>
 </li>
 </ul>
 </li>
@@ -308,7 +308,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>MetaEntry</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3836">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3835">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
                  <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;regionServer,
                  long&nbsp;modTime)</pre>
 </li>
@@ -319,7 +319,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MetaEntry</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3840">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3839">MetaEntry</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;rinfo,
                  <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;regionServer,
                  long&nbsp;modTime,
                  <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;splitA,
@@ -340,7 +340,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3850">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3849">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html#equals-java.lang.Object-">equals</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a></code></dd>
@@ -355,7 +355,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title=
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3864">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html#line.3863">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html#hashCode--">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/HRegionInfo.html" title="class in org.apache.hadoop.hbase">HRegionInfo</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
index 3efb403..c1e1553 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3893">HBaseFsck.OnlineEntry</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.3892">HBaseFsck.OnlineEntry</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Stores the regioninfo retrieved from Online region servers.</div>
 </li>
@@ -206,7 +206,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hri</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3894">hri</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3893">hri</a></pre>
 </li>
 </ul>
 <a name="hsa">
@@ -215,7 +215,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hsa</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3895">hsa</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3894">hsa</a></pre>
 </li>
 </ul>
 </li>
@@ -232,7 +232,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>OnlineEntry</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3893">OnlineEntry</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3892">OnlineEntry</a>()</pre>
 </li>
 </ul>
 </li>
@@ -249,7 +249,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3898">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.OnlineEntry.html#line.3897">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>


[14/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.MetaEntry.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="line.1543"></a>
-<spa

<TRUNCATED>

[20/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</

<TRUNCATED>

[29/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
index 2559531..06869f5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Writer.html
@@ -364,627 +364,625 @@
 <span class="sourceLineNo">356</span>   */<a name="line.356"></a>
 <span class="sourceLineNo">357</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.357"></a>
 <span class="sourceLineNo">358</span>       conf) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    Configuration tempConf = new Configuration(conf);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      CacheConfig cacheConf) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    int version = getFormatVersion(conf);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    switch (version) {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    case 2:<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.373"></a>
-<span class="sourceLineNo">374</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        "in hbase-site.xml)");<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    case 3:<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    default:<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          "format version " + version);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    }<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /**<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * An abstraction used by the block index.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public interface CachingBlockReader {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     * Read in a file block.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>     * @param offset offset to read.<a name="line.392"></a>
-<span class="sourceLineNo">393</span>     * @param onDiskBlockSize size of the block<a name="line.393"></a>
-<span class="sourceLineNo">394</span>     * @param cacheBlock<a name="line.394"></a>
-<span class="sourceLineNo">395</span>     * @param pread<a name="line.395"></a>
-<span class="sourceLineNo">396</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.396"></a>
-<span class="sourceLineNo">397</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     *  caching efficiency of encoded data blocks)<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.401"></a>
-<span class="sourceLineNo">402</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.402"></a>
-<span class="sourceLineNo">403</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>     * @return Block wrapped in a ByteBuffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>     * @throws IOException<a name="line.405"></a>
-<span class="sourceLineNo">406</span>     */<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        throws IOException;<a name="line.411"></a>
-<span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>    /**<a name="line.413"></a>
-<span class="sourceLineNo">414</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.414"></a>
-<span class="sourceLineNo">415</span>     * @param block Block to be returned.<a name="line.415"></a>
-<span class="sourceLineNo">416</span>     */<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    void returnBlock(HFileBlock block);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.423"></a>
-<span class="sourceLineNo">424</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.424"></a>
-<span class="sourceLineNo">425</span>     * write.<a name="line.425"></a>
-<span class="sourceLineNo">426</span>     */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    String getName();<a name="line.427"></a>
+<span class="sourceLineNo">359</span>    return HFile.getWriterFactory(conf, CacheConfig.DISABLED);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      CacheConfig cacheConf) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    int version = getFormatVersion(conf);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    switch (version) {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    case 2:<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.371"></a>
+<span class="sourceLineNo">372</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        "in hbase-site.xml)");<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    case 3:<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    default:<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          "format version " + version);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * An abstraction used by the block index.<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  public interface CachingBlockReader {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>     * Read in a file block.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>     * @param offset offset to read.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>     * @param onDiskBlockSize size of the block<a name="line.391"></a>
+<span class="sourceLineNo">392</span>     * @param cacheBlock<a name="line.392"></a>
+<span class="sourceLineNo">393</span>     * @param pread<a name="line.393"></a>
+<span class="sourceLineNo">394</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.394"></a>
+<span class="sourceLineNo">395</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.396"></a>
+<span class="sourceLineNo">397</span>     *  caching efficiency of encoded data blocks)<a name="line.397"></a>
+<span class="sourceLineNo">398</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.398"></a>
+<span class="sourceLineNo">399</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.399"></a>
+<span class="sourceLineNo">400</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.400"></a>
+<span class="sourceLineNo">401</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.401"></a>
+<span class="sourceLineNo">402</span>     * @return Block wrapped in a ByteBuffer.<a name="line.402"></a>
+<span class="sourceLineNo">403</span>     * @throws IOException<a name="line.403"></a>
+<span class="sourceLineNo">404</span>     */<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>        throws IOException;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>    /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>     * @param block Block to be returned.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>     */<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    void returnBlock(HFileBlock block);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.422"></a>
+<span class="sourceLineNo">423</span>     * write.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>     */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    String getName();<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    CellComparator getComparator();<a name="line.427"></a>
 <span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    CellComparator getComparator();<a name="line.429"></a>
+<span class="sourceLineNo">429</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.429"></a>
 <span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.431"></a>
+<span class="sourceLineNo">431</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.431"></a>
 <span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.433"></a>
+<span class="sourceLineNo">433</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.433"></a>
 <span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.435"></a>
+<span class="sourceLineNo">435</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.437"></a>
+<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.437"></a>
 <span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.439"></a>
+<span class="sourceLineNo">439</span>    long length();<a name="line.439"></a>
 <span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>    long length();<a name="line.441"></a>
+<span class="sourceLineNo">441</span>    long getEntries();<a name="line.441"></a>
 <span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    long getEntries();<a name="line.443"></a>
+<span class="sourceLineNo">443</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.443"></a>
 <span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.445"></a>
+<span class="sourceLineNo">445</span>    long indexSize();<a name="line.445"></a>
 <span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>    long indexSize();<a name="line.447"></a>
+<span class="sourceLineNo">447</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.447"></a>
 <span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.449"></a>
+<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.449"></a>
 <span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.451"></a>
+<span class="sourceLineNo">451</span>    FixedFileTrailer getTrailer();<a name="line.451"></a>
 <span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>    FixedFileTrailer getTrailer();<a name="line.453"></a>
+<span class="sourceLineNo">453</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.453"></a>
 <span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.455"></a>
+<span class="sourceLineNo">455</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.455"></a>
 <span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.457"></a>
+<span class="sourceLineNo">457</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.457"></a>
 <span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    /**<a name="line.461"></a>
-<span class="sourceLineNo">462</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.462"></a>
-<span class="sourceLineNo">463</span>     * {@link HFile} version.<a name="line.463"></a>
-<span class="sourceLineNo">464</span>     * Knows nothing about how that metadata is structured.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>     */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    /**<a name="line.468"></a>
-<span class="sourceLineNo">469</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.469"></a>
-<span class="sourceLineNo">470</span>     * {@link HFile}  version.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>     * Knows nothing about how that metadata is structured.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>     */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.473"></a>
+<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
+<span class="sourceLineNo">460</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.460"></a>
+<span class="sourceLineNo">461</span>     * {@link HFile} version.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>     * Knows nothing about how that metadata is structured.<a name="line.462"></a>
+<span class="sourceLineNo">463</span>     */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>    /**<a name="line.466"></a>
+<span class="sourceLineNo">467</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.467"></a>
+<span class="sourceLineNo">468</span>     * {@link HFile}  version.<a name="line.468"></a>
+<span class="sourceLineNo">469</span>     * Knows nothing about how that metadata is structured.<a name="line.469"></a>
+<span class="sourceLineNo">470</span>     */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>    Path getPath();<a name="line.473"></a>
 <span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>    Path getPath();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    /** Close method with optional evictOnClose */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    void close(boolean evictOnClose) throws IOException;<a name="line.478"></a>
+<span class="sourceLineNo">475</span>    /** Close method with optional evictOnClose */<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    void close(boolean evictOnClose) throws IOException;<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.478"></a>
 <span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.480"></a>
+<span class="sourceLineNo">480</span>    boolean hasMVCCInfo();<a name="line.480"></a>
 <span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    boolean hasMVCCInfo();<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>     * Return the file context of the HFile this reader belongs to<a name="line.485"></a>
-<span class="sourceLineNo">486</span>     */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    HFileContext getFileContext();<a name="line.487"></a>
+<span class="sourceLineNo">482</span>    /**<a name="line.482"></a>
+<span class="sourceLineNo">483</span>     * Return the file context of the HFile this reader belongs to<a name="line.483"></a>
+<span class="sourceLineNo">484</span>     */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    HFileContext getFileContext();<a name="line.485"></a>
+<span class="sourceLineNo">486</span><a name="line.486"></a>
+<span class="sourceLineNo">487</span>    boolean isPrimaryReplicaReader();<a name="line.487"></a>
 <span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    boolean isPrimaryReplicaReader();<a name="line.489"></a>
+<span class="sourceLineNo">489</span>    boolean shouldIncludeMemStoreTS();<a name="line.489"></a>
 <span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>    boolean shouldIncludeMemStoreTS();<a name="line.491"></a>
+<span class="sourceLineNo">491</span>    boolean isDecodeMemStoreTS();<a name="line.491"></a>
 <span class="sourceLineNo">492</span><a name="line.492"></a>
-<span class="sourceLineNo">493</span>    boolean isDecodeMemStoreTS();<a name="line.493"></a>
+<span class="sourceLineNo">493</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.493"></a>
 <span class="sourceLineNo">494</span><a name="line.494"></a>
-<span class="sourceLineNo">495</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>    @VisibleForTesting<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>    @VisibleForTesting<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    boolean prefetchComplete();<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>    /**<a name="line.503"></a>
-<span class="sourceLineNo">504</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.504"></a>
-<span class="sourceLineNo">505</span>     * implementation should take care of thread safety.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>     */<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    void unbufferStream();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Method returns the reader given the specified arguments.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   *<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * @param path hfile's path<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * @param fsdis stream of path's file<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param size max size of the trailer.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param hfs<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @return an appropriate instance of HFileReader<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      justification="Intentional")<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    FixedFileTrailer trailer = null;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    try {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      switch (trailer.getMajorVersion()) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        case 2:<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.535"></a>
-<span class="sourceLineNo">536</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        case 3:<a name="line.537"></a>
-<span class="sourceLineNo">538</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.538"></a>
-<span class="sourceLineNo">539</span>              primaryReplicaReader, conf);<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        default:<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } catch (Throwable t) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      IOUtils.closeQuietly(fsdis);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    } finally {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      fsdis.unbuffer();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>  }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>  /**<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * The sockets and the file descriptors held by the method parameter<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * that no other threads have access to the same passed reference.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param fs A file system<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @param path Path to HFile<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * @param fsdis a stream of path's file<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   * @param size max size of the trailer.<a name="line.558"></a>
-<span class="sourceLineNo">559</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.559"></a>
-<span class="sourceLineNo">560</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.560"></a>
-<span class="sourceLineNo">561</span>   * @param conf Configuration<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @return A version specific Hfile Reader<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      throws IOException {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    HFileSystem hfs = null;<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    // the filesystem.<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (!(fs instanceof HFileSystem)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      hfs = new HFileSystem(fs);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    } else {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      hfs = (HFileSystem) fs;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    }<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
-<span class="sourceLineNo">581</span><a name="line.581"></a>
-<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
-<span class="sourceLineNo">583</span>  * Creates reader with cache configuration disabled<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  * @param fs filesystem<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  * @param path Path to file to read<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  * @return an active Reader instance<a name="line.586"></a>
-<span class="sourceLineNo">587</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  */<a name="line.589"></a>
-<span class="sourceLineNo">590</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      throws IOException {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.592"></a>
-<span class="sourceLineNo">593</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>  }<a name="line.595"></a>
-<span class="sourceLineNo">596</span><a name="line.596"></a>
-<span class="sourceLineNo">597</span>  /**<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   * @param fs filesystem<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   * @param path Path to file to read<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * @param cacheConf This must not be null. @see<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   * @return an active Reader instance<a name="line.603"></a>
-<span class="sourceLineNo">604</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.604"></a>
-<span class="sourceLineNo">605</span>   *           is corrupt/invalid.<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   */<a name="line.606"></a>
-<span class="sourceLineNo">607</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>  }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>  /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * The sockets and the file descriptors held by the method parameter<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.618"></a>
-<span class="sourceLineNo">619</span>   * that no other threads have access to the same passed reference.<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
-<span class="sourceLineNo">622</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.625"></a>
-<span class="sourceLineNo">626</span>  }<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>  /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @param fs filesystem<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * @param path Path to file to verify<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * @throws IOException if failed to read from the underlying stream<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   */<a name="line.634"></a>
-<span class="sourceLineNo">635</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>  }<a name="line.637"></a>
-<span class="sourceLineNo">638</span><a name="line.638"></a>
-<span class="sourceLineNo">639</span>  /**<a name="line.639"></a>
-<span class="sourceLineNo">640</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>   * @param fs filesystem<a name="line.641"></a>
-<span class="sourceLineNo">642</span>   * @param fileStatus the file to verify<a name="line.642"></a>
-<span class="sourceLineNo">643</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.643"></a>
-<span class="sourceLineNo">644</span>   * @throws IOException if failed to read from the underlying stream<a name="line.644"></a>
-<span class="sourceLineNo">645</span>   */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    final Path path = fileStatus.getPath();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    final long size = fileStatus.getLen();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try (FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path)) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      return true;<a name="line.654"></a>
-<span class="sourceLineNo">655</span>    } catch (IllegalArgumentException e) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return false;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
-<span class="sourceLineNo">661</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.661"></a>
-<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public FileInfo() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      super();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.680"></a>
-<span class="sourceLineNo">681</span>     * key prefix.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>     *<a name="line.682"></a>
-<span class="sourceLineNo">683</span>     * @param k key to add<a name="line.683"></a>
-<span class="sourceLineNo">684</span>     * @param v value to add<a name="line.684"></a>
-<span class="sourceLineNo">685</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.685"></a>
-<span class="sourceLineNo">686</span>     *          with the reserved prefix<a name="line.686"></a>
-<span class="sourceLineNo">687</span>     * @return this file info object<a name="line.687"></a>
-<span class="sourceLineNo">688</span>     * @throws IOException if the key or value is invalid<a name="line.688"></a>
-<span class="sourceLineNo">689</span>     */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        final boolean checkPrefix) throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (k == null || v == null) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        throw new NullPointerException("Key nor value may be null");<a name="line.693"></a>
-<span class="sourceLineNo">694</span>      }<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.696"></a>
-<span class="sourceLineNo">697</span>            + " are reserved");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      }<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      put(k, v);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return this;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    public void clear() {<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.map.clear();<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return map.comparator();<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
-<span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public boolean containsKey(Object key) {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      return map.containsKey(key);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public boolean containsValue(Object value) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return map.containsValue(value);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      return map.entrySet();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>    @Override<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    public boolean equals(Object o) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      return map.equals(o);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span>    @Override<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    public byte[] firstKey() {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      return map.firstKey();<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    }<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    @Override<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    public byte[] get(Object key) {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      return map.get(key);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    @Override<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    public int hashCode() {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return map.hashCode();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    }<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>    @Override<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return this.map.headMap(toKey);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public boolean isEmpty() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      return map.isEmpty();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    @Override<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      return map.keySet();<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    }<a name="line.761"></a>
-<span class="sourceLineNo">762</span><a name="line.762"></a>
-<span class="sourceLineNo">763</span>    @Override<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    public byte[] lastKey() {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      return map.lastKey();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    }<a name="line.766"></a>
-<span class="sourceLineNo">767</span><a name="line.767"></a>
-<span class="sourceLineNo">768</span>    @Override<a name="line.768"></a>
-<span class="sourceLineNo">769</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      return this.map.put(key, value);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    }<a name="line.771"></a>
-<span class="sourceLineNo">772</span><a name="line.772"></a>
-<span class="sourceLineNo">773</span>    @Override<a name="line.773"></a>
-<span class="sourceLineNo">774</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      this.map.putAll(m);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public byte[] remove(Object key) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      return this.map.remove(key);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    }<a name="line.781"></a>
-<span class="sourceLineNo">782</span><a name="line.782"></a>
-<span class="sourceLineNo">783</span>    @Override<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    public int size() {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return map.size();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span><a name="line.787"></a>
-<span class="sourceLineNo">788</span>    @Override<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      return this.map.subMap(fromKey, toKey);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
-<span class="sourceLineNo">792</span><a name="line.792"></a>
-<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      return this.map.tailMap(fromKey);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
-<span class="sourceLineNo">797</span><a name="line.797"></a>
-<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    public Collection&lt;byte[]&gt; values() {<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return map.values();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
-<span class="sourceLineNo">802</span><a name="line.802"></a>
-<span class="sourceLineNo">803</span>    /**<a name="line.803"></a>
-<span class="sourceLineNo">804</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.804"></a>
-<span class="sourceLineNo">805</span>     * We write it as a protobuf.<a name="line.805"></a>
-<span class="sourceLineNo">806</span>     * @param out<a name="line.806"></a>
-<span class="sourceLineNo">807</span>     * @throws IOException<a name="line.807"></a>
-<span class="sourceLineNo">808</span>     * @see #read(DataInputStream)<a name="line.808"></a>
-<span class="sourceLineNo">809</span>     */<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    void write(final DataOutputStream out) throws IOException {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey()));<a name="line.814"></a>
-<span class="sourceLineNo">815</span>        bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue()));<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      }<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      builder.build().writeDelimitedTo(out);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>    /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.823"></a>
-<span class="sourceLineNo">824</span>     * Can deserialize protobuf of old Writables format.<a name="line.824"></a>
-<span class="sourceLineNo">825</span>     * @param in<a name="line.825"></a>
-<span class="sourceLineNo">826</span>     * @throws IOException<a name="line.826"></a>
-<span class="sourceLineNo">827</span>     * @see #write(DataOutputStream)<a name="line.827"></a>
-<span class="sourceLineNo">828</span>     */<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    void read(final DataInputStream in) throws IOException {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      byte [] pbuf = new byte[pblen];<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      if (in.markSupported()) in.mark(pblen);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int read = in.read(pbuf);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      } else {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>        if (in.markSupported()) {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>          in.reset();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          parseWritable(in);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        } else {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.843"></a>
-<span class="sourceLineNo">844</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.844"></a>
-<span class="sourceLineNo">845</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.845"></a>
-<span class="sourceLineNo">846</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          parseWritable(new DataInputStream(sis));<a name="line.849"></a>
-<span class="sourceLineNo">850</span>        }<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      }<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * @throws IOException<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      this.map.clear();<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      // Read the number of entries in the map<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      int entries = in.readInt();<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      // Then read each key/value pair<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>        byte [] key = Bytes.readByteArray(in);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.867"></a>
-<span class="sourceLineNo">868</span>        in.readByte();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        byte [] value = Bytes.readByteArray(in);<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        this.map.put(key, value);<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    }<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    /**<a name="line.874"></a>
-<span class="sourceLineNo">875</span>     * Fill our map with content of the pb we read off disk<a name="line.875"></a>
-<span class="sourceLineNo">876</span>     * @param fip protobuf message to read<a name="line.876"></a>
-<span class="sourceLineNo">877</span>     */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      this.map.clear();<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>  }<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.886"></a>
-<span class="sourceLineNo">887</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  /**<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * HFile.Writer.<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   *<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   * @return Array of strings, each represents a supported compression<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.896"></a>
-<span class="sourceLineNo">897</span>   *         supported.<a name="line.897"></a>
-<span class="sourceLineNo">898</span>   *         &lt;ul&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>   *         &lt;li&gt;"none" - No compression.<a name="line.899"></a>
-<span class="sourceLineNo">900</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>   *         &lt;/ul&gt;<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   */<a name="line.902"></a>
-<span class="sourceLineNo">903</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>    return Compression.getSupportedAlgorithms();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>  }<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>  // Utility methods.<a name="line.907"></a>
-<span class="sourceLineNo">908</span>  /*<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   * @param l Long to convert to an int.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   */<a name="line.911"></a>
-<span class="sourceLineNo">912</span>  static int longToInt(final long l) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>  }<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>  /**<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   * empty list.<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   *<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   * @param fs  The file system reference.<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param regionDir  The region directory to scan.<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   * @return The list of files found.<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   * @throws IOException When scanning the files fails.<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   */<a name="line.926"></a>
-<span class="sourceLineNo">927</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      throws IOException {<a name="line.928"></a>
-<span class="sourceLineNo">929</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;&gt;();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    for(FileStatus dir : familyDirs) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      for (FileStatus file : files) {<a name="line.934"></a>
-<span class="sourceLineNo">935</span>        if (!file.isDirectory() &amp;&amp;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.936"></a>
-<span class="sourceLineNo">937</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>          regionHFiles.add(file.getPath());<a name="line.938"></a>
-<span class="sourceLineNo">939</span>        }<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      }<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return regionHFiles;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * indicate that this is not a software error, but corrupted input.<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   *<a name="line.950"></a>
-<span class="sourceLineNo">951</span>   * @param version an HFile version<a name="line.951"></a>
-<span class="sourceLineNo">952</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.952"></a>
-<span class="sourceLineNo">953</span>   */<a name="line.953"></a>
-<span class="sourceLineNo">954</span>  public static void checkFormatVersion(int version)<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      throws IllegalArgumentException {<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.957"></a>
-<span class="sourceLineNo">958</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.958"></a>
-<span class="sourceLineNo">959</span>          + MAX_FORMAT_VERSION + ")");<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    }<a name="line.960"></a>
-<span class="sourceLineNo">961</span>  }<a name="line.961"></a>
-<span class="sourceLineNo">962</span><a name="line.962"></a>
-<span class="sourceLineNo">963</span><a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public static void checkHFileVersion(final Configuration c) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>    int version = c.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    if (version &lt; MAX_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>      throw new IllegalArgumentException("The setting for " + FORMAT_VERSION_KEY +<a name="line.967"></a>
-<span class="sourceLineNo">968</span>        " (in your hbase-*.xml files) is " + version + " which does not match " +<a name="line.968"></a>
-<span class="sourceLineNo">969</span>        MAX_FORMAT_VERSION +<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        "; are you running with a configuration from an older or newer hbase install (an " +<a name="line.970"></a>
-<span class="sourceLineNo">971</span>        "incompatible hbase-default.xml or hbase-site.xml on your CLASSPATH)?");<a name="line.971"></a>
-<span class="sourceLineNo">972</span>    }<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  }<a name="line.973"></a>
-<span class="sourceLineNo">974</span><a name="line.974"></a>
-<span class="sourceLineNo">975</span>  public static void main(String[] args) throws Exception {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    // delegate to preserve old behavior<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    HFilePrettyPrinter.main(args);<a name="line.977"></a>
-<span class="sourceLineNo">978</span>  }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>}<a name="line.979"></a>
+<span class="sourceLineNo">495</span>    @VisibleForTesting<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>    @VisibleForTesting<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    boolean prefetchComplete();<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>    /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>     * implementation should take care of thread safety.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>     */<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    void unbufferStream();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
+<span class="sourceLineNo">507</span><a name="line.507"></a>
+<span class="sourceLineNo">508</span>  /**<a name="line.508"></a>
+<span class="sourceLineNo">509</span>   * Method returns the reader given the specified arguments.<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   *<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param path hfile's path<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param fsdis stream of path's file<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param size max size of the trailer.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   * @param hfs<a name="line.516"></a>
+<span class="sourceLineNo">517</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.517"></a>
+<span class="sourceLineNo">518</span>   * @return an appropriate instance of HFileReader<a name="line.518"></a>
+<span class="sourceLineNo">519</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   */<a name="line.520"></a>
+<span class="sourceLineNo">521</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      justification="Intentional")<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      throws IOException {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    FixedFileTrailer trailer = null;<a name="line.526"></a>
+<span class="sourceLineNo">527</span>    try {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.528"></a>
+<span class="sourceLineNo">529</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      switch (trailer.getMajorVersion()) {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        case 2:<a name="line.532"></a>
+<span class="sourceLineNo">533</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.533"></a>
+<span class="sourceLineNo">534</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        case 3:<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              primaryReplicaReader, conf);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        default:<a name="line.538"></a>
+<span class="sourceLineNo">539</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    } catch (Throwable t) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      IOUtils.closeQuietly(fsdis);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    } finally {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      fsdis.unbuffer();<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
+<span class="sourceLineNo">550</span>   * The sockets and the file descriptors held by the method parameter<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * that no other threads have access to the same passed reference.<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   * @param fs A file system<a name="line.553"></a>
+<span class="sourceLineNo">554</span>   * @param path Path to HFile<a name="line.554"></a>
+<span class="sourceLineNo">555</span>   * @param fsdis a stream of path's file<a name="line.555"></a>
+<span class="sourceLineNo">556</span>   * @param size max size of the trailer.<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param conf Configuration<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return A version specific Hfile Reader<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      throws IOException {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    HFileSystem hfs = null;<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    // the filesystem.<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    if (!(fs instanceof HFileSystem)) {<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      hfs = new HFileSystem(fs);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    } else {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      hfs = (HFileSystem) fs;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.577"></a>
+<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
+<span class="sourceLineNo">579</span><a name="line.579"></a>
+<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
+<span class="sourceLineNo">581</span>  * Creates reader with cache configuration disabled<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  * @param fs filesystem<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  * @param path Path to file to read<a name="line.583"></a>
+<span class="sourceLineNo">584</span>  * @return an active Reader instance<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>  */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>  }<a name="line.593"></a>
+<span class="sourceLineNo">594</span><a name="line.594"></a>
+<span class="sourceLineNo">595</span>  /**<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   * @param fs filesystem<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   * @param path Path to file to read<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * @param cacheConf This must not be null. @see<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * @return an active Reader instance<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   *           is corrupt/invalid.<a name="line.603"></a>
+<span class="sourceLineNo">604</span>   */<a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * The sockets and the file descriptors held by the method parameter<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * that no other threads have access to the same passed reference.<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
+<span class="sourceLineNo">619</span>  @VisibleForTesting<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * @param fs filesystem<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   * @param path Path to file to verify<a name="line.629"></a>
+<span class="sourceLineNo">630</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.630"></a>
+<span class="sourceLineNo">631</span>   * @throws IOException if failed to read from the underlying stream<a name="line.631"></a>
+<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
+<span class="sourceLineNo">633</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  /**<a name="line.637"></a>
+<span class="sourceLineNo">638</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * @param fs filesystem<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * @param fileStatus the file to verify<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @throws IOException if failed to read from the underlying stream<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throws IOException {<a name="line.645"></a>
+<span class="sourceLineNo">646</

<TRUNCATED>

[35/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
index 5a504a4..d2d3b91 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4333">HBaseFsck.WorkItemRegion</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4332">HBaseFsck.WorkItemRegion</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 <div class="block">Contact a region server and get all information from it</div>
@@ -226,7 +226,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>hbck</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4334">hbck</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4333">hbck</a></pre>
 </li>
 </ul>
 <a name="rsinfo">
@@ -235,7 +235,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>rsinfo</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4335">rsinfo</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4334">rsinfo</a></pre>
 </li>
 </ul>
 <a name="errors">
@@ -244,7 +244,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>errors</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4336">errors</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4335">errors</a></pre>
 </li>
 </ul>
 <a name="connection">
@@ -253,7 +253,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>connection</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4337">connection</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4336">connection</a></pre>
 </li>
 </ul>
 </li>
@@ -270,7 +270,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemRegion</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4339">WorkItemRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4338">WorkItemRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
                <a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;info,
                <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
                <a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;connection)</pre>
@@ -290,7 +290,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4348">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4347">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -306,7 +306,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>filterRegions</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4382">filterRegions</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;regions)</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html#line.4381">filterRegions</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;regions)</pre>
 </li>
 </ul>
 </li>


[26/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
index 61bf913..1d05508 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
@@ -317,7 +317,7 @@
 <span class="sourceLineNo">309</span>      return -2;<a name="line.309"></a>
 <span class="sourceLineNo">310</span>    }<a name="line.310"></a>
 <span class="sourceLineNo">311</span><a name="line.311"></a>
-<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, new CacheConfig(getConf()), true, getConf());<a name="line.312"></a>
+<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, CacheConfig.DISABLED, true, getConf());<a name="line.312"></a>
 <span class="sourceLineNo">313</span><a name="line.313"></a>
 <span class="sourceLineNo">314</span>    Map&lt;byte[], byte[]&gt; fileInfo = reader.loadFileInfo();<a name="line.314"></a>
 <span class="sourceLineNo">315</span><a name="line.315"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.Builder.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.Builder.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.Builder.html
index 61bf913..1d05508 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.Builder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.Builder.html
@@ -317,7 +317,7 @@
 <span class="sourceLineNo">309</span>      return -2;<a name="line.309"></a>
 <span class="sourceLineNo">310</span>    }<a name="line.310"></a>
 <span class="sourceLineNo">311</span><a name="line.311"></a>
-<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, new CacheConfig(getConf()), true, getConf());<a name="line.312"></a>
+<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, CacheConfig.DISABLED, true, getConf());<a name="line.312"></a>
 <span class="sourceLineNo">313</span><a name="line.313"></a>
 <span class="sourceLineNo">314</span>    Map&lt;byte[], byte[]&gt; fileInfo = reader.loadFileInfo();<a name="line.314"></a>
 <span class="sourceLineNo">315</span><a name="line.315"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
index 61bf913..1d05508 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
@@ -317,7 +317,7 @@
 <span class="sourceLineNo">309</span>      return -2;<a name="line.309"></a>
 <span class="sourceLineNo">310</span>    }<a name="line.310"></a>
 <span class="sourceLineNo">311</span><a name="line.311"></a>
-<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, new CacheConfig(getConf()), true, getConf());<a name="line.312"></a>
+<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, CacheConfig.DISABLED, true, getConf());<a name="line.312"></a>
 <span class="sourceLineNo">313</span><a name="line.313"></a>
 <span class="sourceLineNo">314</span>    Map&lt;byte[], byte[]&gt; fileInfo = reader.loadFileInfo();<a name="line.314"></a>
 <span class="sourceLineNo">315</span><a name="line.315"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
index 61bf913..1d05508 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
@@ -317,7 +317,7 @@
 <span class="sourceLineNo">309</span>      return -2;<a name="line.309"></a>
 <span class="sourceLineNo">310</span>    }<a name="line.310"></a>
 <span class="sourceLineNo">311</span><a name="line.311"></a>
-<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, new CacheConfig(getConf()), true, getConf());<a name="line.312"></a>
+<span class="sourceLineNo">312</span>    HFile.Reader reader = HFile.createReader(fs, file, CacheConfig.DISABLED, true, getConf());<a name="line.312"></a>
 <span class="sourceLineNo">313</span><a name="line.313"></a>
 <span class="sourceLineNo">314</span>    Map&lt;byte[], byte[]&gt; fileInfo = reader.loadFileInfo();<a name="line.314"></a>
 <span class="sourceLineNo">315</span><a name="line.315"></a>


[36/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
index 0be63ac..fe95742 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4202">HBaseFsck.PrintingErrorReporter</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4201">HBaseFsck.PrintingErrorReporter</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></pre>
 </li>
@@ -301,7 +301,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>errorCount</h4>
-<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4203">errorCount</a></pre>
+<pre>public&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4202">errorCount</a></pre>
 </li>
 </ul>
 <a name="showProgress">
@@ -310,7 +310,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>showProgress</h4>
-<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4204">showProgress</a></pre>
+<pre>private&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4203">showProgress</a></pre>
 </li>
 </ul>
 <a name="progressThreshold">
@@ -319,7 +319,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>progressThreshold</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4206">progressThreshold</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4205">progressThreshold</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.PrintingErrorReporter.progressThreshold">Constant Field Values</a></dd>
@@ -332,7 +332,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>errorTables</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4208">errorTables</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4207">errorTables</a></pre>
 </li>
 </ul>
 <a name="errorList">
@@ -341,7 +341,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockListLast">
 <li class="blockList">
 <h4>errorList</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4211">errorList</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4210">errorList</a></pre>
 </li>
 </ul>
 </li>
@@ -358,7 +358,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PrintingErrorReporter</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4202">PrintingErrorReporter</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4201">PrintingErrorReporter</a>()</pre>
 </li>
 </ul>
 </li>
@@ -375,7 +375,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>clear</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4214">clear</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4213">clear</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#clear--">clear</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -388,7 +388,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4221">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4220">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -402,7 +402,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4236">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4235">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 <dl>
@@ -417,7 +417,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4242">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4241">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info)</pre>
@@ -433,7 +433,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4250">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4249">reportError</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&nbsp;errorCode,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;info1,
@@ -450,7 +450,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>reportError</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4259">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4258">reportError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#reportError-java.lang.String-">reportError</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -463,7 +463,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>report</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4269">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4268">report</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <div class="block">Report error information, but do not increment the error count.  Intended for cases
  where the actual error would have been reported previously.</div>
 <dl>
@@ -480,7 +480,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>summarize</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4277">summarize</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4276">summarize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#summarize--">summarize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -493,7 +493,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>getErrorList</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4290">getErrorList</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/ArrayList.html?is-external=true" title="class or interface in java.util">ArrayList</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter.ERROR_CODE</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4289">getErrorList</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#getErrorList--">getErrorList</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -506,7 +506,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>print</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4295">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4294">print</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#print-java.lang.String-">print</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -519,7 +519,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>tableHasErrors</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4302">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4301">tableHasErrors</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;table)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#tableHasErrors-org.apache.hadoop.hbase.util.HBaseFsck.TableInfo-">tableHasErrors</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -532,7 +532,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>resetErrors</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4307">resetErrors</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4306">resetErrors</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#resetErrors--">resetErrors</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -545,7 +545,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockList">
 <li class="blockList">
 <h4>detail</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4312">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4311">detail</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#detail-java.lang.String-">detail</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>
@@ -558,7 +558,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorR
 <ul class="blockListLast">
 <li class="blockList">
 <h4>progress</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4320">progress</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html#line.4319">progress</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html#progress--">progress</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
index 0fc9527..c84735e 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
@@ -126,7 +126,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1325">HBaseFsck.RegionRepairException</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.1324">HBaseFsck.RegionRepairException</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Exception thrown when a integrity repair operation fails in an
  unresolvable way.</div>
@@ -221,7 +221,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockList">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1326">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1325">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.util.HBaseFsck.RegionRepairException.serialVersionUID">Constant Field Values</a></dd>
@@ -234,7 +234,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ioe</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1327">ioe</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1326">ioe</a></pre>
 </li>
 </ul>
 </li>
@@ -251,7 +251,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RegionRepairException</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1328">RegionRepairException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;s,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html#line.1327">RegionRepairException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;s,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>&nbsp;ioe)</pre>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
index 8361276..b428a58 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3016">HBaseFsck.TableInfo.HDFSIntegrityFixer</a>
+<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3015">HBaseFsck.TableInfo.HDFSIntegrityFixer</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo.IntegrityFixSuggester</a></pre>
 <div class="block">This handler fixes integrity errors from hdfs information.  There are
  basically three classes of integrity problems 1) holes, 2) overlaps, and
@@ -295,7 +295,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3017">conf</a></pre>
+<pre>org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3016">conf</a></pre>
 </li>
 </ul>
 <a name="fixOverlaps">
@@ -304,7 +304,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fixOverlaps</h4>
-<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3019">fixOverlaps</a></pre>
+<pre>boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3018">fixOverlaps</a></pre>
 </li>
 </ul>
 </li>
@@ -321,7 +321,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>HDFSIntegrityFixer</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3021">HDFSIntegrityFixer</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3020">HDFSIntegrityFixer</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
                    <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
                    org.apache.hadoop.conf.Configuration&nbsp;conf,
                    boolean&nbsp;fixHoles,
@@ -342,7 +342,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionStartKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3035">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;next)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3034">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;next)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This is a special case hole -- when the first region of a table is
  missing from META, HBase doesn't acknowledge the existance of the
@@ -367,7 +367,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionEndKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3055">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3054">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleRegionEndKeyNotEmpty-byte:A-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling case where a Table has a last region that does not
@@ -391,7 +391,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleHoleInRegionChain</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3077">handleHoleInRegionChain</a>(byte[]&nbsp;holeStartKey,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3076">handleHoleInRegionChain</a>(byte[]&nbsp;holeStartKey,
                                     byte[]&nbsp;holeStopKey)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">There is a hole in the hdfs regions that violates the table integrity
@@ -415,7 +415,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>handleOverlapGroup</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3107">handleOverlapGroup</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3106">handleOverlapGroup</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">This takes set of overlapping regions and merges them into a single
  region.  This covers cases like degenerate regions, shared start key,
@@ -444,7 +444,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>removeParentsAndFixSplits</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3132">removeParentsAndFixSplits</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3131">removeParentsAndFixSplits</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -458,7 +458,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockList">
 <li class="blockList">
 <h4>mergeOverlaps</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3244">mergeOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3243">mergeOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlap)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -472,7 +472,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>sidelineBigOverlaps</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3322">sidelineBigOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;bigOverlap)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html#line.3321">sidelineBigOverlaps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;bigOverlap)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Sideline some regions in a big overlap group so that it
  will have fewer regions, and it is easier to merge them later on.</div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
index 2842fe9..3517ea8 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2932">HBaseFsck.TableInfo.IntegrityFixSuggester</a>
+<pre>private class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2931">HBaseFsck.TableInfo.IntegrityFixSuggester</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html" title="class in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandlerImpl</a></pre>
 </li>
 </ul>
@@ -267,7 +267,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockListLast">
 <li class="blockList">
 <h4>errors</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2933">errors</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2932">errors</a></pre>
 </li>
 </ul>
 </li>
@@ -284,7 +284,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockListLast">
 <li class="blockList">
 <h4>IntegrityFixSuggester</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2935">IntegrityFixSuggester</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2934">IntegrityFixSuggester</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.TableInfo</a>&nbsp;ti,
                       <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors)</pre>
 </li>
 </ul>
@@ -302,7 +302,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionStartKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2941">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2940">handleRegionStartKeyNotEmpty</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleRegionStartKeyNotEmpty-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling case where a Table has a first region that does not
@@ -327,7 +327,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleRegionEndKeyNotEmpty</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2949">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2948">handleRegionEndKeyNotEmpty</a>(byte[]&nbsp;curEndKey)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleRegionEndKeyNotEmpty-byte:A-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling case where a Table has a last region that does not
@@ -351,7 +351,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleDegenerateRegion</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2956">handleDegenerateRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2955">handleDegenerateRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleDegenerateRegion-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
 <div class="block">Callback for handling a region that has the same start and end key.</div>
@@ -373,7 +373,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleDuplicateStartKeys</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2962">handleDuplicateStartKeys</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2961">handleDuplicateStartKeys</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
                                      <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r2)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleDuplicateStartKeys-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
@@ -398,7 +398,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleSplit</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2974">handleSplit</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2973">handleSplit</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r1,
                         <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;r2)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html#handleSplit-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandler</a></code></span></div>
@@ -419,7 +419,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockList">
 <li class="blockList">
 <h4>handleOverlapInRegionChain</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2986">handleOverlapInRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi1,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2985">handleOverlapInRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi1,
                                        <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hi2)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleOverlapInRegionChain-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo-">TableIntegrityErrorHandlerImpl</a></code></span></div>
@@ -446,7 +446,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrity
 <ul class="blockListLast">
 <li class="blockList">
 <h4>handleHoleInRegionChain</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2993">handleHoleInRegionChain</a>(byte[]&nbsp;holeStart,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.IntegrityFixSuggester.html#line.2992">handleHoleInRegionChain</a>(byte[]&nbsp;holeStart,
                                     byte[]&nbsp;holeStop)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.html#handleHoleInRegionChain-byte:A-byte:A-">TableIntegrityErrorHandlerImpl</a></code></span></div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
index b0efaac..3bdbb5b 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2837">HBaseFsck.TableInfo</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2836">HBaseFsck.TableInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Maintain information about a particular table.</div>
 </li>
@@ -293,7 +293,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tableName</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2838">tableName</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2837">tableName</a></pre>
 </li>
 </ul>
 <a name="deployedOn">
@@ -302,7 +302,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deployedOn</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/TreeSet.html?is-external=true" title="class or interface in java.util">TreeSet</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2839">deployedOn</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/TreeSet.html?is-external=true" title="class or interface in java.util">TreeSet</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2838">deployedOn</a></pre>
 </li>
 </ul>
 <a name="backwards">
@@ -311,7 +311,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>backwards</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2842">backwards</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2841">backwards</a></pre>
 </li>
 </ul>
 <a name="sidelinedRegions">
@@ -320,7 +320,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>sidelinedRegions</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.Path,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2845">sidelinedRegions</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;org.apache.hadoop.fs.Path,<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2844">sidelinedRegions</a></pre>
 </li>
 </ul>
 <a name="sc">
@@ -329,7 +329,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>sc</h4>
-<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/RegionSplitCalculator.html" title="class in org.apache.hadoop.hbase.util">RegionSplitCalculator</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2848">sc</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/RegionSplitCalculator.html" title="class in org.apache.hadoop.hbase.util">RegionSplitCalculator</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2847">sc</a></pre>
 </li>
 </ul>
 <a name="htds">
@@ -338,7 +338,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>htds</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2851">htds</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2850">htds</a></pre>
 </li>
 </ul>
 <a name="overlapGroups">
@@ -347,7 +347,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>overlapGroups</h4>
-<pre>final&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2854">overlapGroups</a></pre>
+<pre>final&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2853">overlapGroups</a></pre>
 </li>
 </ul>
 <a name="regionsFromMeta">
@@ -356,7 +356,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>regionsFromMeta</h4>
-<pre>private&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2858">regionsFromMeta</a></pre>
+<pre>private&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2857">regionsFromMeta</a></pre>
 </li>
 </ul>
 </li>
@@ -373,7 +373,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TableInfo</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2860">TableInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;name)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2859">TableInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;name)</pre>
 </li>
 </ul>
 </li>
@@ -390,7 +390,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getHTD</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2868">getHTD</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2867">getHTD</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>descriptor common to all regions.  null if are none or multiple!</dd>
@@ -403,7 +403,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addRegionInfo</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2878">addRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hir)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2877">addRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hir)</pre>
 </li>
 </ul>
 <a name="addServer-org.apache.hadoop.hbase.ServerName-">
@@ -412,7 +412,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2903">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2902">addServer</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;server)</pre>
 </li>
 </ul>
 <a name="getName--">
@@ -421,7 +421,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2907">getName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2906">getName</a>()</pre>
 </li>
 </ul>
 <a name="getNumRegions--">
@@ -430,7 +430,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumRegions</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2911">getNumRegions</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2910">getNumRegions</a>()</pre>
 </li>
 </ul>
 <a name="getRegionsFromMeta--">
@@ -439,7 +439,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionsFromMeta</h4>
-<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2915">getRegionsFromMeta</a>()</pre>
+<pre>public&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.2914">getRegionsFromMeta</a>()</pre>
 </li>
 </ul>
 <a name="checkRegionChain-org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler-">
@@ -448,7 +448,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkRegionChain</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3370">checkRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler)
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3369">checkRegionChain</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check the region chain (from META) of this table.  We are looking for
  holes, overlaps, and cycles.</div>
@@ -466,7 +466,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>handleOverlapsParallel</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3500">handleOverlapsParallel</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3499">handleOverlapsParallel</a>(<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler,
                                        byte[]&nbsp;prevKey)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -481,7 +481,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>dump</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3537">dump</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true" title="class or interface in java.util">SortedSet</a>&lt;byte[]&gt;&nbsp;splits,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html#line.3536">dump</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true" title="class or interface in java.util">SortedSet</a>&lt;byte[]&gt;&nbsp;splits,
           org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;regions)</pre>
 <div class="block">This dumps data in a visually reasonable way for visual debugging</div>
 <dl>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
index dd4d462..4adc038 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4398">HBaseFsck.WorkItemHdfsDir</a>
+<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4397">HBaseFsck.WorkItemHdfsDir</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 <div class="block">Contact hdfs and get all information about specified table directory into
@@ -218,7 +218,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>tableDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileStatus <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4399">tableDir</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileStatus <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4398">tableDir</a></pre>
 </li>
 </ul>
 <a name="errors">
@@ -227,7 +227,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>errors</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4400">errors</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4399">errors</a></pre>
 </li>
 </ul>
 <a name="fs">
@@ -236,7 +236,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4401">fs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4400">fs</a></pre>
 </li>
 </ul>
 </li>
@@ -253,7 +253,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemHdfsDir</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4403">WorkItemHdfsDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4402">WorkItemHdfsDir</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                 <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors,
                 org.apache.hadoop.fs.FileStatus&nbsp;status)</pre>
 </li>
@@ -272,7 +272,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4411">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsDir.html#line.4410">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a>,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true" title="class or interface in java.util.concurrent">ExecutionException</a></pre>
 <dl>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
index a831646..aefc1e1 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4518">HBaseFsck.WorkItemHdfsRegionInfo</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.4517">HBaseFsck.WorkItemHdfsRegionInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 <div class="block">Contact hdfs and get all information about specified table directory into
@@ -218,7 +218,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>hbi</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4519">hbi</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4518">hbi</a></pre>
 </li>
 </ul>
 <a name="hbck">
@@ -227,7 +227,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>hbck</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4520">hbck</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4519">hbck</a></pre>
 </li>
 </ul>
 <a name="errors">
@@ -236,7 +236,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>errors</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4521">errors</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4520">errors</a></pre>
 </li>
 </ul>
 </li>
@@ -253,7 +253,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemHdfsRegionInfo</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4523">WorkItemHdfsRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4522">WorkItemHdfsRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi,
                        <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck</a>&nbsp;hbck,
                        <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.html" title="interface in org.apache.hadoop.hbase.util">HBaseFsck.ErrorReporter</a>&nbsp;errors)</pre>
 </li>
@@ -272,7 +272,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4530">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemHdfsRegionInfo.html#line.4529">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
index f57507c..e904684 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2818">HBaseFsck.WorkItemOverlapMerge</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2817">HBaseFsck.WorkItemOverlapMerge</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 </li>
@@ -211,7 +211,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>handler</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2819">handler</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2818">handler</a></pre>
 </li>
 </ul>
 <a name="overlapgroup">
@@ -220,7 +220,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>overlapgroup</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2820">overlapgroup</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2819">overlapgroup</a></pre>
 </li>
 </ul>
 </li>
@@ -237,7 +237,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WorkItemOverlapMerge</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2822">WorkItemOverlapMerge</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlapgroup,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2821">WorkItemOverlapMerge</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&gt;&nbsp;overlapgroup,
                      <a href="../../../../../org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.html" title="interface in org.apache.hadoop.hbase.util.hbck">TableIntegrityErrorHandler</a>&nbsp;handler)</pre>
 </li>
 </ul>
@@ -255,7 +255,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2828">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemOverlapMerge.html#line.2827">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>


[03/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.WorkItemRegion.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some non-existing files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some non-existing files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelining will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelining will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a na

<TRUNCATED>

[23/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 5ee521a..68dec5a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -422,572 +422,570 @@
 <span class="sourceLineNo">414</span>        DataBlockEncoding encoding = overriddenEncoding;<a name="line.414"></a>
 <span class="sourceLineNo">415</span>        encoding = encoding == null ? datablockEncodingMap.get(tableAndFamily) : encoding;<a name="line.415"></a>
 <span class="sourceLineNo">416</span>        encoding = encoding == null ? DataBlockEncoding.NONE : encoding;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>        Configuration tempConf = new Configuration(conf);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.419"></a>
-<span class="sourceLineNo">420</span>                                    .withCompression(compression)<a name="line.420"></a>
-<span class="sourceLineNo">421</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.421"></a>
-<span class="sourceLineNo">422</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>                                    .withBlockSize(blockSize);<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          contextBuilder.withIncludesTags(true);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        if (null == favoredNodes) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          wl.writer =<a name="line.432"></a>
-<span class="sourceLineNo">433</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), fs)<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        } else {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>          wl.writer =<a name="line.437"></a>
-<span class="sourceLineNo">438</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))<a name="line.438"></a>
-<span class="sourceLineNo">439</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.439"></a>
-<span class="sourceLineNo">440</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.440"></a>
-<span class="sourceLineNo">441</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>        this.writers.put(tableAndFamily, wl);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        return wl;<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>        if (w != null) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.450"></a>
-<span class="sourceLineNo">451</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.452"></a>
-<span class="sourceLineNo">453</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>              Bytes.toBytes(true));<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>              Bytes.toBytes(compactionExclude));<a name="line.457"></a>
-<span class="sourceLineNo">458</span>          w.appendTrackedTimestampsToMetadata();<a name="line.458"></a>
-<span class="sourceLineNo">459</span>          w.close();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        }<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>      @Override<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      public void close(TaskAttemptContext c)<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      throws IOException, InterruptedException {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        for (WriterLength wl: this.writers.values()) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          close(wl.writer);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    };<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
-<span class="sourceLineNo">474</span>   * Configure block storage policy for CF after the directory is created.<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   */<a name="line.475"></a>
-<span class="sourceLineNo">476</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      return;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    String policy =<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.483"></a>
-<span class="sourceLineNo">484</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
-<span class="sourceLineNo">487</span><a name="line.487"></a>
-<span class="sourceLineNo">488</span>  /*<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   */<a name="line.490"></a>
-<span class="sourceLineNo">491</span>  static class WriterLength {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    long written = 0;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    StoreFileWriter writer = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>  }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * Return the start keys of all of the regions in this table,<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * as a list of ImmutableBytesWritable.<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   */<a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.500"></a>
-<span class="sourceLineNo">501</span>                                                                 boolean writeMultipleTables)<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          throws IOException {<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      TableName tableName = regionLocator.getName();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      for (byte[] byteKey : byteKeys) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        if (writeMultipleTables)<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        {<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          //MultiTableHFileOutputFormat use case<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        if (LOG.isDebugEnabled()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.518"></a>
-<span class="sourceLineNo">519</span>                  (fullKey) + "]");<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    }<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return ret;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("deprecation")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    if (startKeys.isEmpty()) {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>      throw new IllegalArgumentException("No regions passed");<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    }<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // We're generating a list of split points, and we don't ever<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    // empty reducer with index 0<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    ImmutableBytesWritable first = sorted.first();<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    if (writeMultipleTables) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              ().get()));<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      throw new IllegalArgumentException(<a name="line.550"></a>
-<span class="sourceLineNo">551</span>          "First region of table should have empty start key. Instead has: "<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          + Bytes.toStringBinary(first.get()));<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    sorted.remove(sorted.first());<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    // Write the actual file<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      NullWritable.class);<a name="line.560"></a>
-<span class="sourceLineNo">561</span><a name="line.561"></a>
-<span class="sourceLineNo">562</span>    try {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.563"></a>
-<span class="sourceLineNo">564</span>        writer.append(startKey, NullWritable.get());<a name="line.564"></a>
-<span class="sourceLineNo">565</span>      }<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    } finally {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      writer.close();<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  /**<a name="line.571"></a>
-<span class="sourceLineNo">572</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.572"></a>
-<span class="sourceLineNo">573</span>   * table. This<a name="line.573"></a>
-<span class="sourceLineNo">574</span>   * &lt;ul&gt;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   * &lt;/ul&gt;<a name="line.581"></a>
-<span class="sourceLineNo">582</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   * running this function.<a name="line.583"></a>
-<span class="sourceLineNo">584</span>   */<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      throws IOException {<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>  /**<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * table. This<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   * &lt;ul&gt;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.595"></a>
-<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.596"></a>
-<span class="sourceLineNo">597</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * &lt;/ul&gt;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * running this function.<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   */<a name="line.603"></a>
-<span class="sourceLineNo">604</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      RegionLocator regionLocator) throws IOException {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.608"></a>
-<span class="sourceLineNo">609</span>  }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    Configuration conf = job.getConfiguration();<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    job.setOutputFormatClass(cls);<a name="line.616"></a>
-<span class="sourceLineNo">617</span><a name="line.617"></a>
-<span class="sourceLineNo">618</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    }<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    boolean writeMultipleTables = false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      writeMultipleTables = true;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    // sort the incoming values.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>      job.setReducerClass(CellSortReducer.class);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      job.setReducerClass(PutSortReducer.class);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      job.setReducerClass(TextSortReducer.class);<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.637"></a>
-<span class="sourceLineNo">638</span>    }<a name="line.638"></a>
-<span class="sourceLineNo">639</span><a name="line.639"></a>
-<span class="sourceLineNo">640</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        CellSerialization.class.getName());<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    }<a name="line.646"></a>
-<span class="sourceLineNo">647</span><a name="line.647"></a>
-<span class="sourceLineNo">648</span>    /* Now get the region start keys for every table required */<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.651"></a>
-<span class="sourceLineNo">652</span><a name="line.652"></a>
-<span class="sourceLineNo">653</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      String tn = writeMultipleTables?<a name="line.656"></a>
-<span class="sourceLineNo">657</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      allTableNames.add(tn);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.663"></a>
-<span class="sourceLineNo">664</span>            .toString(tableSeparator)));<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // Use table's region boundaries for TOP split points.<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>        "to match current region count for all tables");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    job.setNumReduceTasks(startKeys.size());<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    // Set compression algorithms based on column families<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.674"></a>
+<span class="sourceLineNo">417</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.417"></a>
+<span class="sourceLineNo">418</span>                                    .withCompression(compression)<a name="line.418"></a>
+<span class="sourceLineNo">419</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.419"></a>
+<span class="sourceLineNo">420</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.420"></a>
+<span class="sourceLineNo">421</span>                                    .withBlockSize(blockSize);<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>          contextBuilder.withIncludesTags(true);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>        }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>        if (null == favoredNodes) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>          wl.writer =<a name="line.430"></a>
+<span class="sourceLineNo">431</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, fs)<a name="line.431"></a>
+<span class="sourceLineNo">432</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.432"></a>
+<span class="sourceLineNo">433</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.433"></a>
+<span class="sourceLineNo">434</span>        } else {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>          wl.writer =<a name="line.435"></a>
+<span class="sourceLineNo">436</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, new HFileSystem(fs))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.437"></a>
+<span class="sourceLineNo">438</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.438"></a>
+<span class="sourceLineNo">439</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        }<a name="line.440"></a>
+<span class="sourceLineNo">441</span><a name="line.441"></a>
+<span class="sourceLineNo">442</span>        this.writers.put(tableAndFamily, wl);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        return wl;<a name="line.443"></a>
+<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
+<span class="sourceLineNo">445</span><a name="line.445"></a>
+<span class="sourceLineNo">446</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>        if (w != null) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.448"></a>
+<span class="sourceLineNo">449</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.450"></a>
+<span class="sourceLineNo">451</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.452"></a>
+<span class="sourceLineNo">453</span>              Bytes.toBytes(true));<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.454"></a>
+<span class="sourceLineNo">455</span>              Bytes.toBytes(compactionExclude));<a name="line.455"></a>
+<span class="sourceLineNo">456</span>          w.appendTrackedTimestampsToMetadata();<a name="line.456"></a>
+<span class="sourceLineNo">457</span>          w.close();<a name="line.457"></a>
+<span class="sourceLineNo">458</span>        }<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      }<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>      @Override<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      public void close(TaskAttemptContext c)<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      throws IOException, InterruptedException {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        for (WriterLength wl: this.writers.values()) {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>          close(wl.writer);<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        }<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    };<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * Configure block storage policy for CF after the directory is created.<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.474"></a>
+<span class="sourceLineNo">475</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      return;<a name="line.477"></a>
+<span class="sourceLineNo">478</span>    }<a name="line.478"></a>
+<span class="sourceLineNo">479</span><a name="line.479"></a>
+<span class="sourceLineNo">480</span>    String policy =<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.481"></a>
+<span class="sourceLineNo">482</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /*<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   */<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  static class WriterLength {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    long written = 0;<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    StoreFileWriter writer = null;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
+<span class="sourceLineNo">493</span><a name="line.493"></a>
+<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
+<span class="sourceLineNo">495</span>   * Return the start keys of all of the regions in this table,<a name="line.495"></a>
+<span class="sourceLineNo">496</span>   * as a list of ImmutableBytesWritable.<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   */<a name="line.497"></a>
+<span class="sourceLineNo">498</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.498"></a>
+<span class="sourceLineNo">499</span>                                                                 boolean writeMultipleTables)<a name="line.499"></a>
+<span class="sourceLineNo">500</span>          throws IOException {<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.503"></a>
+<span class="sourceLineNo">504</span>    {<a name="line.504"></a>
+<span class="sourceLineNo">505</span>      TableName tableName = regionLocator.getName();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      for (byte[] byteKey : byteKeys) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        if (writeMultipleTables)<a name="line.510"></a>
+<span class="sourceLineNo">511</span>        {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>          //MultiTableHFileOutputFormat use case<a name="line.512"></a>
+<span class="sourceLineNo">513</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>        if (LOG.isDebugEnabled()) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.516"></a>
+<span class="sourceLineNo">517</span>                  (fullKey) + "]");<a name="line.517"></a>
+<span class="sourceLineNo">518</span>        }<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.519"></a>
+<span class="sourceLineNo">520</span>      }<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    return ret;<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  }<a name="line.523"></a>
+<span class="sourceLineNo">524</span><a name="line.524"></a>
+<span class="sourceLineNo">525</span>  /**<a name="line.525"></a>
+<span class="sourceLineNo">526</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.526"></a>
+<span class="sourceLineNo">527</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.527"></a>
+<span class="sourceLineNo">528</span>   */<a name="line.528"></a>
+<span class="sourceLineNo">529</span>  @SuppressWarnings("deprecation")<a name="line.529"></a>
+<span class="sourceLineNo">530</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.532"></a>
+<span class="sourceLineNo">533</span>    if (startKeys.isEmpty()) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>      throw new IllegalArgumentException("No regions passed");<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
+<span class="sourceLineNo">536</span><a name="line.536"></a>
+<span class="sourceLineNo">537</span>    // We're generating a list of split points, and we don't ever<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.539"></a>
+<span class="sourceLineNo">540</span>    // empty reducer with index 0<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    ImmutableBytesWritable first = sorted.first();<a name="line.542"></a>
+<span class="sourceLineNo">543</span>    if (writeMultipleTables) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.544"></a>
+<span class="sourceLineNo">545</span>              ().get()));<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      throw new IllegalArgumentException(<a name="line.548"></a>
+<span class="sourceLineNo">549</span>          "First region of table should have empty start key. Instead has: "<a name="line.549"></a>
+<span class="sourceLineNo">550</span>          + Bytes.toStringBinary(first.get()));<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    }<a name="line.551"></a>
+<span class="sourceLineNo">552</span>    sorted.remove(sorted.first());<a name="line.552"></a>
+<span class="sourceLineNo">553</span><a name="line.553"></a>
+<span class="sourceLineNo">554</span>    // Write the actual file<a name="line.554"></a>
+<span class="sourceLineNo">555</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.555"></a>
+<span class="sourceLineNo">556</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.557"></a>
+<span class="sourceLineNo">558</span>      NullWritable.class);<a name="line.558"></a>
+<span class="sourceLineNo">559</span><a name="line.559"></a>
+<span class="sourceLineNo">560</span>    try {<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        writer.append(startKey, NullWritable.get());<a name="line.562"></a>
+<span class="sourceLineNo">563</span>      }<a name="line.563"></a>
+<span class="sourceLineNo">564</span>    } finally {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      writer.close();<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    }<a name="line.566"></a>
+<span class="sourceLineNo">567</span>  }<a name="line.567"></a>
+<span class="sourceLineNo">568</span><a name="line.568"></a>
+<span class="sourceLineNo">569</span>  /**<a name="line.569"></a>
+<span class="sourceLineNo">570</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.570"></a>
+<span class="sourceLineNo">571</span>   * table. This<a name="line.571"></a>
+<span class="sourceLineNo">572</span>   * &lt;ul&gt;<a name="line.572"></a>
+<span class="sourceLineNo">573</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.573"></a>
+<span class="sourceLineNo">574</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.574"></a>
+<span class="sourceLineNo">575</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.577"></a>
+<span class="sourceLineNo">578</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.578"></a>
+<span class="sourceLineNo">579</span>   * &lt;/ul&gt;<a name="line.579"></a>
+<span class="sourceLineNo">580</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.580"></a>
+<span class="sourceLineNo">581</span>   * running this function.<a name="line.581"></a>
+<span class="sourceLineNo">582</span>   */<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.583"></a>
+<span class="sourceLineNo">584</span>      throws IOException {<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  }<a name="line.586"></a>
+<span class="sourceLineNo">587</span><a name="line.587"></a>
+<span class="sourceLineNo">588</span>  /**<a name="line.588"></a>
+<span class="sourceLineNo">589</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.589"></a>
+<span class="sourceLineNo">590</span>   * table. This<a name="line.590"></a>
+<span class="sourceLineNo">591</span>   * &lt;ul&gt;<a name="line.591"></a>
+<span class="sourceLineNo">592</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.592"></a>
+<span class="sourceLineNo">593</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.593"></a>
+<span class="sourceLineNo">594</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.594"></a>
+<span class="sourceLineNo">595</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * &lt;/ul&gt;<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * running this function.<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   */<a name="line.601"></a>
+<span class="sourceLineNo">602</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      RegionLocator regionLocator) throws IOException {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>  }<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.610"></a>
+<span class="sourceLineNo">611</span>    Configuration conf = job.getConfiguration();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    job.setOutputFormatClass(cls);<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.617"></a>
+<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
+<span class="sourceLineNo">619</span>    boolean writeMultipleTables = false;<a name="line.619"></a>
+<span class="sourceLineNo">620</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      writeMultipleTables = true;<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    // sort the incoming values.<a name="line.625"></a>
+<span class="sourceLineNo">626</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.626"></a>
+<span class="sourceLineNo">627</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      job.setReducerClass(CellSortReducer.class);<a name="line.629"></a>
+<span class="sourceLineNo">630</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>      job.setReducerClass(PutSortReducer.class);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      job.setReducerClass(TextSortReducer.class);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    } else {<a name="line.634"></a>
+<span class="sourceLineNo">635</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.635"></a>
+<span class="sourceLineNo">636</span>    }<a name="line.636"></a>
+<span class="sourceLineNo">637</span><a name="line.637"></a>
+<span class="sourceLineNo">638</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.639"></a>
+<span class="sourceLineNo">640</span>        CellSerialization.class.getName());<a name="line.640"></a>
+<span class="sourceLineNo">641</span><a name="line.641"></a>
+<span class="sourceLineNo">642</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.643"></a>
+<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
+<span class="sourceLineNo">645</span><a name="line.645"></a>
+<span class="sourceLineNo">646</span>    /* Now get the region start keys for every table required */<a name="line.646"></a>
+<span class="sourceLineNo">647</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.647"></a>
+<span class="sourceLineNo">648</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.648"></a>
+<span class="sourceLineNo">649</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.651"></a>
+<span class="sourceLineNo">652</span>    {<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.653"></a>
+<span class="sourceLineNo">654</span>      String tn = writeMultipleTables?<a name="line.654"></a>
+<span class="sourceLineNo">655</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.655"></a>
+<span class="sourceLineNo">656</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      allTableNames.add(tn);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.660"></a>
+<span class="sourceLineNo">661</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.661"></a>
+<span class="sourceLineNo">662</span>            .toString(tableSeparator)));<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    // Use table's region boundaries for TOP split points.<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.665"></a>
+<span class="sourceLineNo">666</span>        "to match current region count for all tables");<a name="line.666"></a>
+<span class="sourceLineNo">667</span>    job.setNumReduceTasks(startKeys.size());<a name="line.667"></a>
+<span class="sourceLineNo">668</span><a name="line.668"></a>
+<span class="sourceLineNo">669</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.669"></a>
+<span class="sourceLineNo">670</span>    // Set compression algorithms based on column families<a name="line.670"></a>
+<span class="sourceLineNo">671</span><a name="line.671"></a>
+<span class="sourceLineNo">672</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.672"></a>
+<span class="sourceLineNo">673</span>            tableDescriptors));<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.674"></a>
 <span class="sourceLineNo">675</span>            tableDescriptors));<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.676"></a>
+<span class="sourceLineNo">676</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.676"></a>
 <span class="sourceLineNo">677</span>            tableDescriptors));<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>            tableDescriptors));<a name="line.679"></a>
-<span class="sourceLineNo">680</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        tableDescriptors));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.682"></a>
-<span class="sourceLineNo">683</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.683"></a>
-<span class="sourceLineNo">684</span><a name="line.684"></a>
-<span class="sourceLineNo">685</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    TableMapReduceUtil.initCredentials(job);<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.687"></a>
-<span class="sourceLineNo">688</span>  }<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    Configuration conf = job.getConfiguration();<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.699"></a>
-<span class="sourceLineNo">700</span><a name="line.700"></a>
-<span class="sourceLineNo">701</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    // Set compression algorithms based on column families<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY,<a name="line.703"></a>
-<span class="sourceLineNo">704</span>        serializeColumnFamilyAttribute(compressionDetails, singleTableDescriptor));<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY,<a name="line.705"></a>
-<span class="sourceLineNo">706</span>        serializeColumnFamilyAttribute(blockSizeDetails, singleTableDescriptor));<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY,<a name="line.707"></a>
-<span class="sourceLineNo">708</span>        serializeColumnFamilyAttribute(bloomTypeDetails, singleTableDescriptor));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY,<a name="line.709"></a>
-<span class="sourceLineNo">710</span>        serializeColumnFamilyAttribute(bloomParamDetails, singleTableDescriptor));<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.711"></a>
-<span class="sourceLineNo">712</span>        serializeColumnFamilyAttribute(dataBlockEncodingDetails, singleTableDescriptor));<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    TableMapReduceUtil.initCredentials(job);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    LOG.info("Incremental table " + tableDescriptor.getTableName() + " output configured.");<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  /**<a name="line.719"></a>
-<span class="sourceLineNo">720</span>   * Runs inside the task to deserialize column family to compression algorithm<a name="line.720"></a>
-<span class="sourceLineNo">721</span>   * map from the configuration.<a name="line.721"></a>
-<span class="sourceLineNo">722</span>   *<a name="line.722"></a>
-<span class="sourceLineNo">723</span>   * @param conf to read the serialized values from<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * @return a map from column family to the configured compression algorithm<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   */<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  @VisibleForTesting<a name="line.726"></a>
-<span class="sourceLineNo">727</span>  static Map&lt;byte[], Algorithm&gt; createFamilyCompressionMap(Configuration<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      conf) {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        COMPRESSION_FAMILIES_CONF_KEY);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    Map&lt;byte[], Algorithm&gt; compressionMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      Algorithm algorithm = HFileWriterImpl.compressionByName(e.getValue());<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      compressionMap.put(e.getKey(), algorithm);<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return compressionMap;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>   * Runs inside the task to deserialize column family to bloom filter type<a name="line.740"></a>
-<span class="sourceLineNo">741</span>   * map from the configuration.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>   *<a name="line.742"></a>
-<span class="sourceLineNo">743</span>   * @param conf to read the serialized values from<a name="line.743"></a>
-<span class="sourceLineNo">744</span>   * @return a map from column family to the the configured bloom filter type<a name="line.744"></a>
-<span class="sourceLineNo">745</span>   */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>  @VisibleForTesting<a name="line.746"></a>
-<span class="sourceLineNo">747</span>  static Map&lt;byte[], BloomType&gt; createFamilyBloomTypeMap(Configuration conf) {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        BLOOM_TYPE_FAMILIES_CONF_KEY);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>    Map&lt;byte[], BloomType&gt; bloomTypeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      BloomType bloomType = BloomType.valueOf(e.getValue());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      bloomTypeMap.put(e.getKey(), bloomType);<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    }<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    return bloomTypeMap;<a name="line.755"></a>
-<span class="sourceLineNo">756</span>  }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>  /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   * Runs inside the task to deserialize column family to bloom filter param<a name="line.759"></a>
-<span class="sourceLineNo">760</span>   * map from the configuration.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>   *<a name="line.761"></a>
-<span class="sourceLineNo">762</span>   * @param conf to read the serialized values from<a name="line.762"></a>
-<span class="sourceLineNo">763</span>   * @return a map from column family to the the configured bloom filter param<a name="line.763"></a>
-<span class="sourceLineNo">764</span>   */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>  @VisibleForTesting<a name="line.765"></a>
-<span class="sourceLineNo">766</span>  static Map&lt;byte[], String&gt; createFamilyBloomParamMap(Configuration conf) {<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    return createFamilyConfValueMap(conf, BLOOM_PARAM_FAMILIES_CONF_KEY);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>  }<a name="line.768"></a>
-<span class="sourceLineNo">769</span><a name="line.769"></a>
-<span class="sourceLineNo">770</span><a name="line.770"></a>
-<span class="sourceLineNo">771</span>  /**<a name="line.771"></a>
-<span class="sourceLineNo">772</span>   * Runs inside the task to deserialize column family to block size<a name="line.772"></a>
-<span class="sourceLineNo">773</span>   * map from the configuration.<a name="line.773"></a>
-<span class="sourceLineNo">774</span>   *<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * @param conf to read the serialized values from<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * @return a map from column family to the configured block size<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   */<a name="line.777"></a>
-<span class="sourceLineNo">778</span>  @VisibleForTesting<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  static Map&lt;byte[], Integer&gt; createFamilyBlockSizeMap(Configuration conf) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        BLOCK_SIZE_FAMILIES_CONF_KEY);<a name="line.781"></a>
-<span class="sourceLineNo">782</span>    Map&lt;byte[], Integer&gt; blockSizeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      Integer blockSize = Integer.parseInt(e.getValue());<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      blockSizeMap.put(e.getKey(), blockSize);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    return blockSizeMap;<a name="line.787"></a>
-<span class="sourceLineNo">788</span>  }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>  /**<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * Runs inside the task to deserialize column family to data block encoding<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * type map from the configuration.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   *<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   * @param conf to read the serialized values from<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @return a map from column family to HFileDataBlockEncoder for the<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   *         configured data block type for the family<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   */<a name="line.797"></a>
-<span class="sourceLineNo">798</span>  @VisibleForTesting<a name="line.798"></a>
-<span class="sourceLineNo">799</span>  static Map&lt;byte[], DataBlockEncoding&gt; createFamilyDataBlockEncodingMap(<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      Configuration conf) {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        DATABLOCK_ENCODING_FAMILIES_CONF_KEY);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    Map&lt;byte[], DataBlockEncoding&gt; encoderMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.804"></a>
-<span class="sourceLineNo">805</span>      encoderMap.put(e.getKey(), DataBlockEncoding.valueOf((e.getValue())));<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    }<a name="line.806"></a>
-<span class="sourceLineNo">807</span>    return encoderMap;<a name="line.807"></a>
-<span class="sourceLineNo">808</span>  }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span><a name="line.810"></a>
-<span class="sourceLineNo">811</span>  /**<a name="line.811"></a>
-<span class="sourceLineNo">812</span>   * Run inside the task to deserialize column family to given conf value map.<a name="line.812"></a>
-<span class="sourceLineNo">813</span>   *<a name="line.813"></a>
-<span class="sourceLineNo">814</span>   * @param conf to read the serialized values from<a name="line.814"></a>
-<span class="sourceLineNo">815</span>   * @param confName conf key to read from the configuration<a name="line.815"></a>
-<span class="sourceLineNo">816</span>   * @return a map of column family to the given configuration value<a name="line.816"></a>
-<span class="sourceLineNo">817</span>   */<a name="line.817"></a>
-<span class="sourceLineNo">818</span>  private static Map&lt;byte[], String&gt; createFamilyConfValueMap(<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      Configuration conf, String confName) {<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    Map&lt;byte[], String&gt; confValMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    String confVal = conf.get(confName, "");<a name="line.821"></a>
-<span class="sourceLineNo">822</span>    for (String familyConf : confVal.split("&amp;")) {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      String[] familySplit = familyConf.split("=");<a name="line.823"></a>
-<span class="sourceLineNo">824</span>      if (familySplit.length != 2) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>        continue;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      try {<a name="line.827"></a>
-<span class="sourceLineNo">828</span>        confValMap.put(Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8")),<a name="line.828"></a>
-<span class="sourceLineNo">829</span>            URLDecoder.decode(familySplit[1], "UTF-8"));<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      } catch (UnsupportedEncodingException e) {<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        // will not happen with UTF-8 encoding<a name="line.831"></a>
-<span class="sourceLineNo">832</span>        throw new AssertionError(e);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>    }<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    return confValMap;<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * Configure &lt;code&gt;job&lt;/code&gt; with a TotalOrderPartitioner, partitioning against<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * &lt;code&gt;splitPoints&lt;/code&gt;. Cleans up the partitions file after job exists.<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   */<a name="line.841"></a>
-<span class="sourceLineNo">842</span>  static void configurePartitioner(Job job, List&lt;ImmutableBytesWritable&gt; splitPoints, boolean<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          writeMultipleTables)<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      throws IOException {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>    Configuration conf = job.getConfiguration();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    // create the partitions file<a name="line.846"></a>
-<span class="sourceLineNo">847</span>    FileSystem fs = FileSystem.get(conf);<a name="line.847"></a>
-<span class="sourceLineNo">848</span>    String hbaseTmpFsDir =<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,<a name="line.849"></a>
-<span class="sourceLineNo">850</span>            fs.getHomeDirectory() + "/hbase-staging");<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    Path partitionsPath = new Path(hbaseTmpFsDir, "partitions_" + UUID.randomUUID());<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    fs.makeQualified(partitionsPath);<a name="line.852"></a>
-<span class="sourceLineNo">853</span>    writePartitions(conf, partitionsPath, splitPoints, writeMultipleTables);<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    fs.deleteOnExit(partitionsPath);<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    // configure job to use it<a name="line.856"></a>
-<span class="sourceLineNo">857</span>    job.setPartitionerClass(TotalOrderPartitioner.class);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    TotalOrderPartitioner.setPartitionFile(conf, partitionsPath);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")<a name="line.861"></a>
-<span class="sourceLineNo">862</span>  @VisibleForTesting<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static String serializeColumnFamilyAttribute(Function&lt;ColumnFamilyDescriptor, String&gt; fn, List&lt;TableDescriptor&gt; allTables)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws UnsupportedEncodingException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    StringBuilder attributeValue = new StringBuilder();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    int i = 0;<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    for (TableDescriptor tableDescriptor : allTables) {<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      if (tableDescriptor == null) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        // could happen with mock table instance<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        // CODEREVIEW: Can I set an empty string in conf if mock table instance?<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        return "";<a name="line.871"></a>
-<span class="sourceLineNo">872</span>      }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>      for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>        if (i++ &gt; 0) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>          attributeValue.append('&amp;');<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>        attributeValue.append(URLEncoder.encode(<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            Bytes.toString(combineTableNameSuffix(tableDescriptor.getTableName().getName(), familyDescriptor.getName())),<a name="line.878"></a>
-<span class="sourceLineNo">879</span>            "UTF-8"));<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        attributeValue.append('=');<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        attributeValue.append(URLEncoder.encode(fn.apply(familyDescriptor), "UTF-8"));<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>    // Get rid of the last ampersand<a name="line.884"></a>
-<span class="sourceLineNo">885</span>    return attributeValue.toString();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>  }<a name="line.886"></a>
-<span class="sourceLineNo">887</span><a name="line.887"></a>
-<span class="sourceLineNo">888</span>  /**<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   * Serialize column family to compression algorithm map to configuration.<a name="line.889"></a>
-<span class="sourceLineNo">890</span>   * Invoked while configuring the MR job for incremental load.<a name="line.890"></a>
-<span class="sourceLineNo">891</span>   *<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * @param tableDescriptor to read the properties from<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * @param conf to persist serialized values into<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   * @throws IOException<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   *           on failure to read column family descriptors<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   */<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  @VisibleForTesting<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; compressionDetails = familyDescriptor -&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>          familyDescriptor.getCompressionType().getName();<a name="line.899"></a>
-<span class="sourceLineNo">900</span><a name="line.900"></a>
-<span class="sourceLineNo">901</span>  /**<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   * Serialize column family to block size map to configuration. Invoked while<a name="line.902"></a>
-<span class="sourceLineNo">903</span>   * configuring the MR job for incremental load.<a name="line.903"></a>
-<span class="sourceLineNo">904</span>   *<a name="line.904"></a>
-<span class="sourceLineNo">905</span>   * @param tableDescriptor<a name="line.905"></a>
-<span class="sourceLineNo">906</span>   *          to read the properties from<a name="line.906"></a>
-<span class="sourceLineNo">907</span>   * @param conf<a name="line.907"></a>
-<span class="sourceLineNo">908</span>   *          to persist serialized values into<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   *<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @throws IOException<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   *           on failure to read column family descriptors<a name="line.911"></a>
-<span class="sourceLineNo">912</span>   */<a name="line.912"></a>
-<span class="sourceLineNo">913</span>  @VisibleForTesting<a name="line.913"></a>
-<span class="sourceLineNo">914</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; blockSizeDetails = familyDescriptor -&gt; String<a name="line.914"></a>
-<span class="sourceLineNo">915</span>          .valueOf(familyDescriptor.getBlocksize());<a name="line.915"></a>
-<span class="sourceLineNo">916</span><a name="line.916"></a>
-<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   * Serialize column family to bloom type map to configuration. Invoked while<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * configuring the MR job for incremental load.<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   *<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   * @param tableDescriptor<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   *          to read the properties from<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param conf<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   *          to persist serialized values into<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   *<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   * @throws IOException<a name="line.926"></a>
-<span class="sourceLineNo">927</span>   *           on failure to read column family descriptors<a name="line.927"></a>
-<span class="sourceLineNo">928</span>   */<a name="line.928"></a>
-<span class="sourceLineNo">929</span>  @VisibleForTesting<a name="line.929"></a>
-<span class="sourceLineNo">930</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomTypeDetails = familyDescriptor -&gt; {<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    String bloomType = familyDescriptor.getBloomFilterType().toString();<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    if (bloomType == null) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      bloomType = ColumnFamilyDescriptorBuilder.DEFAULT_BLOOMFILTER.name();<a name="line.933"></a>
-<span class="sourceLineNo">934</span>    }<a name="line.934"></a>
-<span class="sourceLineNo">935</span>    return bloomType;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>  };<a name="line.936"></a>
-<span class="sourceLineNo">937</span><a name="line.937"></a>
-<span class="sourceLineNo">938</span>  /**<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   * Serialize column family to bloom param map to configuration. Invoked while<a name="line.939"></a>
-<span class="sourceLineNo">940</span>   * configuring the MR job for incremental load.<a name="line.940"></a>
-<span class="sourceLineNo">941</span>   *<a name="line.941"></a>
-<span class="sourceLineNo">942</span>   * @param tableDescriptor<a name="line.942"></a>
-<span class="sourceLineNo">943</span>   *          to read the properties from<a name="line.943"></a>
-<span class="sourceLineNo">944</span>   * @param conf<a name="line.944"></a>
-<span class="sourceLineNo">945</span>   *          to persist serialized values into<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   *<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * @throws IOException<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   *           on failure to read column family descriptors<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   */<a name="line.949"></a>
-<span class="sourceLineNo">950</span>  @VisibleForTesting<a name="line.950"></a>
-<span class="sourceLineNo">951</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomParamDetails = familyDescriptor -&gt; {<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    BloomType bloomType = familyDescriptor.getBloomFilterType();<a name="line.952"></a>
-<span class="sourceLineNo">953</span>    String bloomParam = "";<a name="line.953"></a>
-<span class="sourceLineNo">954</span>    if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.PREFIX_LENGTH_KEY);<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    } else if (bloomType == BloomType.ROWPREFIX_DELIMITED) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.DELIMITER_KEY);<a name="line.957"></a>
-<span class="sourceLineNo">958</span>    }<a name="line.958"></a>
-<span class="sourceLineNo">959</span>    return bloomParam;<a name="line.959"></a>
-<span class="sourceLineNo">960</span>  };<a name="line.960"></a>
-<span class="sourceLineNo">961</span><a name="line.961"></a>
-<span class="sourceLineNo">962</span>  /**<a name="line.962"></a>
-<span class="sourceLineNo">963</span>   * Serialize column family to data block encoding map to configuration.<a name="line.963"></a>
-<span class="sourceLineNo">964</span>   * Invoked while configuring the MR job for incremental load.<a name="line.964"></a>
-<span class="sourceLineNo">965</span>   *<a name="line.965"></a>
-<span class="sourceLineNo">966</span>   * @param tableDescriptor<a name="line.966"></a>
-<span class="sourceLineNo">967</span>   *          to read the properties from<a name="line.967"></a>
-<span class="sourceLineNo">968</span>   * @param conf<a name="line.968"></a>
-<span class="sourceLineNo">969</span>   *          to persist serialized values into<a name="line.969"></a>
-<span class="sourceLineNo">970</span>   * @throws IOException<a name="line.970"></a>
-<span class="sourceLineNo">971</span>   *           on failure to read column family descriptors<a name="line.971"></a>
-<span class="sourceLineNo">972</span>   */<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  @VisibleForTesting<a name="line.973"></a>
-<span class="sourceLineNo">974</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; dataBlockEncodingDetails = familyDescriptor -&gt; {<a name="line.974"></a>
-<span class="sourceLineNo">975</span>    DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    if (encoding == null) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      encoding = DataBlockEncoding.NONE;<a name="line.977"></a>
-<span class="sourceLineNo">978</span>    }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>    return encoding.toString();<a name="line.979"></a>
-<span class="sourceLineNo">980</span>  };<a name="line.980"></a>
-<span class="sourceLineNo">981</span><a name="line.981"></a>
-<span class="sourceLineNo">982</span>}<a name="line.982"></a>
+<span class="sourceLineNo">678</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.678"></a>
+<span class="sourceLineNo">679</span>        tableDescriptors));<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.680"></a>
+<span class="sourceLineNo">681</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.683"></a>
+<span class="sourceLineNo">684</span>    TableMapReduceUtil.initCredentials(job);<a name="line.684"></a>
+<span class="sourceLineNo">685</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.685"></a>
+<span class="sourceLineNo">686</span>  }<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.688"></a>
+<span class="sourceLineNo">689</span>      IOException {<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    Configuration conf = job.getConfiguration();<a name="line.690"></a>
+<span class="sourceLineNo">691</span><a name="line.691"></a>
+<span class="sourceLineNo">692</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.692"></a>
+<span class="sourceLineNo">693</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.693"></a>
+<span class="sourceLineNo">694</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.694"></a>
+<span class="sourceLineNo">695</span><a name="line.695"></a>
+<span class="sourceLineNo">696</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.696"></a>
+<span class="sourceLineNo">697</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.697"></a>
+<span class="sourceLineNo">698</span><a name="line.698"></a>
+<span class="sourceLineNo">699</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    // Set compression algorithms based on column families<a name="line.700"></a>
+<span class="sourceLineNo">701</span>  

<TRUNCATED>

[31/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
index 2559531..06869f5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
@@ -364,627 +364,625 @@
 <span class="sourceLineNo">356</span>   */<a name="line.356"></a>
 <span class="sourceLineNo">357</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.357"></a>
 <span class="sourceLineNo">358</span>       conf) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    Configuration tempConf = new Configuration(conf);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      CacheConfig cacheConf) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    int version = getFormatVersion(conf);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    switch (version) {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    case 2:<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.373"></a>
-<span class="sourceLineNo">374</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        "in hbase-site.xml)");<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    case 3:<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    default:<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          "format version " + version);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    }<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /**<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * An abstraction used by the block index.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public interface CachingBlockReader {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     * Read in a file block.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>     * @param offset offset to read.<a name="line.392"></a>
-<span class="sourceLineNo">393</span>     * @param onDiskBlockSize size of the block<a name="line.393"></a>
-<span class="sourceLineNo">394</span>     * @param cacheBlock<a name="line.394"></a>
-<span class="sourceLineNo">395</span>     * @param pread<a name="line.395"></a>
-<span class="sourceLineNo">396</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.396"></a>
-<span class="sourceLineNo">397</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     *  caching efficiency of encoded data blocks)<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.401"></a>
-<span class="sourceLineNo">402</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.402"></a>
-<span class="sourceLineNo">403</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>     * @return Block wrapped in a ByteBuffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>     * @throws IOException<a name="line.405"></a>
-<span class="sourceLineNo">406</span>     */<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        throws IOException;<a name="line.411"></a>
-<span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>    /**<a name="line.413"></a>
-<span class="sourceLineNo">414</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.414"></a>
-<span class="sourceLineNo">415</span>     * @param block Block to be returned.<a name="line.415"></a>
-<span class="sourceLineNo">416</span>     */<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    void returnBlock(HFileBlock block);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.423"></a>
-<span class="sourceLineNo">424</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.424"></a>
-<span class="sourceLineNo">425</span>     * write.<a name="line.425"></a>
-<span class="sourceLineNo">426</span>     */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    String getName();<a name="line.427"></a>
+<span class="sourceLineNo">359</span>    return HFile.getWriterFactory(conf, CacheConfig.DISABLED);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      CacheConfig cacheConf) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    int version = getFormatVersion(conf);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    switch (version) {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    case 2:<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.371"></a>
+<span class="sourceLineNo">372</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        "in hbase-site.xml)");<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    case 3:<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    default:<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          "format version " + version);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * An abstraction used by the block index.<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  public interface CachingBlockReader {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>     * Read in a file block.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>     * @param offset offset to read.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>     * @param onDiskBlockSize size of the block<a name="line.391"></a>
+<span class="sourceLineNo">392</span>     * @param cacheBlock<a name="line.392"></a>
+<span class="sourceLineNo">393</span>     * @param pread<a name="line.393"></a>
+<span class="sourceLineNo">394</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.394"></a>
+<span class="sourceLineNo">395</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.396"></a>
+<span class="sourceLineNo">397</span>     *  caching efficiency of encoded data blocks)<a name="line.397"></a>
+<span class="sourceLineNo">398</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.398"></a>
+<span class="sourceLineNo">399</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.399"></a>
+<span class="sourceLineNo">400</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.400"></a>
+<span class="sourceLineNo">401</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.401"></a>
+<span class="sourceLineNo">402</span>     * @return Block wrapped in a ByteBuffer.<a name="line.402"></a>
+<span class="sourceLineNo">403</span>     * @throws IOException<a name="line.403"></a>
+<span class="sourceLineNo">404</span>     */<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>        throws IOException;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>    /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>     * @param block Block to be returned.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>     */<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    void returnBlock(HFileBlock block);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.422"></a>
+<span class="sourceLineNo">423</span>     * write.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>     */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    String getName();<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    CellComparator getComparator();<a name="line.427"></a>
 <span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    CellComparator getComparator();<a name="line.429"></a>
+<span class="sourceLineNo">429</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.429"></a>
 <span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.431"></a>
+<span class="sourceLineNo">431</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.431"></a>
 <span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.433"></a>
+<span class="sourceLineNo">433</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.433"></a>
 <span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.435"></a>
+<span class="sourceLineNo">435</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.437"></a>
+<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.437"></a>
 <span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.439"></a>
+<span class="sourceLineNo">439</span>    long length();<a name="line.439"></a>
 <span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>    long length();<a name="line.441"></a>
+<span class="sourceLineNo">441</span>    long getEntries();<a name="line.441"></a>
 <span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    long getEntries();<a name="line.443"></a>
+<span class="sourceLineNo">443</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.443"></a>
 <span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.445"></a>
+<span class="sourceLineNo">445</span>    long indexSize();<a name="line.445"></a>
 <span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>    long indexSize();<a name="line.447"></a>
+<span class="sourceLineNo">447</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.447"></a>
 <span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.449"></a>
+<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.449"></a>
 <span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.451"></a>
+<span class="sourceLineNo">451</span>    FixedFileTrailer getTrailer();<a name="line.451"></a>
 <span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>    FixedFileTrailer getTrailer();<a name="line.453"></a>
+<span class="sourceLineNo">453</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.453"></a>
 <span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.455"></a>
+<span class="sourceLineNo">455</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.455"></a>
 <span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.457"></a>
+<span class="sourceLineNo">457</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.457"></a>
 <span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    /**<a name="line.461"></a>
-<span class="sourceLineNo">462</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.462"></a>
-<span class="sourceLineNo">463</span>     * {@link HFile} version.<a name="line.463"></a>
-<span class="sourceLineNo">464</span>     * Knows nothing about how that metadata is structured.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>     */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    /**<a name="line.468"></a>
-<span class="sourceLineNo">469</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.469"></a>
-<span class="sourceLineNo">470</span>     * {@link HFile}  version.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>     * Knows nothing about how that metadata is structured.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>     */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.473"></a>
+<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
+<span class="sourceLineNo">460</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.460"></a>
+<span class="sourceLineNo">461</span>     * {@link HFile} version.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>     * Knows nothing about how that metadata is structured.<a name="line.462"></a>
+<span class="sourceLineNo">463</span>     */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>    /**<a name="line.466"></a>
+<span class="sourceLineNo">467</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.467"></a>
+<span class="sourceLineNo">468</span>     * {@link HFile}  version.<a name="line.468"></a>
+<span class="sourceLineNo">469</span>     * Knows nothing about how that metadata is structured.<a name="line.469"></a>
+<span class="sourceLineNo">470</span>     */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>    Path getPath();<a name="line.473"></a>
 <span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>    Path getPath();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    /** Close method with optional evictOnClose */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    void close(boolean evictOnClose) throws IOException;<a name="line.478"></a>
+<span class="sourceLineNo">475</span>    /** Close method with optional evictOnClose */<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    void close(boolean evictOnClose) throws IOException;<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.478"></a>
 <span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.480"></a>
+<span class="sourceLineNo">480</span>    boolean hasMVCCInfo();<a name="line.480"></a>
 <span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    boolean hasMVCCInfo();<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>     * Return the file context of the HFile this reader belongs to<a name="line.485"></a>
-<span class="sourceLineNo">486</span>     */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    HFileContext getFileContext();<a name="line.487"></a>
+<span class="sourceLineNo">482</span>    /**<a name="line.482"></a>
+<span class="sourceLineNo">483</span>     * Return the file context of the HFile this reader belongs to<a name="line.483"></a>
+<span class="sourceLineNo">484</span>     */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    HFileContext getFileContext();<a name="line.485"></a>
+<span class="sourceLineNo">486</span><a name="line.486"></a>
+<span class="sourceLineNo">487</span>    boolean isPrimaryReplicaReader();<a name="line.487"></a>
 <span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    boolean isPrimaryReplicaReader();<a name="line.489"></a>
+<span class="sourceLineNo">489</span>    boolean shouldIncludeMemStoreTS();<a name="line.489"></a>
 <span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>    boolean shouldIncludeMemStoreTS();<a name="line.491"></a>
+<span class="sourceLineNo">491</span>    boolean isDecodeMemStoreTS();<a name="line.491"></a>
 <span class="sourceLineNo">492</span><a name="line.492"></a>
-<span class="sourceLineNo">493</span>    boolean isDecodeMemStoreTS();<a name="line.493"></a>
+<span class="sourceLineNo">493</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.493"></a>
 <span class="sourceLineNo">494</span><a name="line.494"></a>
-<span class="sourceLineNo">495</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>    @VisibleForTesting<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>    @VisibleForTesting<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    boolean prefetchComplete();<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>    /**<a name="line.503"></a>
-<span class="sourceLineNo">504</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.504"></a>
-<span class="sourceLineNo">505</span>     * implementation should take care of thread safety.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>     */<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    void unbufferStream();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Method returns the reader given the specified arguments.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   *<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * @param path hfile's path<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * @param fsdis stream of path's file<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param size max size of the trailer.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param hfs<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @return an appropriate instance of HFileReader<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      justification="Intentional")<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    FixedFileTrailer trailer = null;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    try {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      switch (trailer.getMajorVersion()) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        case 2:<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.535"></a>
-<span class="sourceLineNo">536</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        case 3:<a name="line.537"></a>
-<span class="sourceLineNo">538</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.538"></a>
-<span class="sourceLineNo">539</span>              primaryReplicaReader, conf);<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        default:<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } catch (Throwable t) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      IOUtils.closeQuietly(fsdis);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    } finally {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      fsdis.unbuffer();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>  }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>  /**<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * The sockets and the file descriptors held by the method parameter<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * that no other threads have access to the same passed reference.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param fs A file system<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @param path Path to HFile<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * @param fsdis a stream of path's file<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   * @param size max size of the trailer.<a name="line.558"></a>
-<span class="sourceLineNo">559</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.559"></a>
-<span class="sourceLineNo">560</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.560"></a>
-<span class="sourceLineNo">561</span>   * @param conf Configuration<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @return A version specific Hfile Reader<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      throws IOException {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    HFileSystem hfs = null;<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    // the filesystem.<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (!(fs instanceof HFileSystem)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      hfs = new HFileSystem(fs);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    } else {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      hfs = (HFileSystem) fs;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    }<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
-<span class="sourceLineNo">581</span><a name="line.581"></a>
-<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
-<span class="sourceLineNo">583</span>  * Creates reader with cache configuration disabled<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  * @param fs filesystem<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  * @param path Path to file to read<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  * @return an active Reader instance<a name="line.586"></a>
-<span class="sourceLineNo">587</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  */<a name="line.589"></a>
-<span class="sourceLineNo">590</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      throws IOException {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.592"></a>
-<span class="sourceLineNo">593</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>  }<a name="line.595"></a>
-<span class="sourceLineNo">596</span><a name="line.596"></a>
-<span class="sourceLineNo">597</span>  /**<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   * @param fs filesystem<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   * @param path Path to file to read<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * @param cacheConf This must not be null. @see<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   * @return an active Reader instance<a name="line.603"></a>
-<span class="sourceLineNo">604</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.604"></a>
-<span class="sourceLineNo">605</span>   *           is corrupt/invalid.<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   */<a name="line.606"></a>
-<span class="sourceLineNo">607</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>  }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>  /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * The sockets and the file descriptors held by the method parameter<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.618"></a>
-<span class="sourceLineNo">619</span>   * that no other threads have access to the same passed reference.<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
-<span class="sourceLineNo">622</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.625"></a>
-<span class="sourceLineNo">626</span>  }<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>  /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @param fs filesystem<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * @param path Path to file to verify<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * @throws IOException if failed to read from the underlying stream<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   */<a name="line.634"></a>
-<span class="sourceLineNo">635</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>  }<a name="line.637"></a>
-<span class="sourceLineNo">638</span><a name="line.638"></a>
-<span class="sourceLineNo">639</span>  /**<a name="line.639"></a>
-<span class="sourceLineNo">640</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>   * @param fs filesystem<a name="line.641"></a>
-<span class="sourceLineNo">642</span>   * @param fileStatus the file to verify<a name="line.642"></a>
-<span class="sourceLineNo">643</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.643"></a>
-<span class="sourceLineNo">644</span>   * @throws IOException if failed to read from the underlying stream<a name="line.644"></a>
-<span class="sourceLineNo">645</span>   */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    final Path path = fileStatus.getPath();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    final long size = fileStatus.getLen();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try (FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path)) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      return true;<a name="line.654"></a>
-<span class="sourceLineNo">655</span>    } catch (IllegalArgumentException e) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return false;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
-<span class="sourceLineNo">661</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.661"></a>
-<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public FileInfo() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      super();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.680"></a>
-<span class="sourceLineNo">681</span>     * key prefix.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>     *<a name="line.682"></a>
-<span class="sourceLineNo">683</span>     * @param k key to add<a name="line.683"></a>
-<span class="sourceLineNo">684</span>     * @param v value to add<a name="line.684"></a>
-<span class="sourceLineNo">685</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.685"></a>
-<span class="sourceLineNo">686</span>     *          with the reserved prefix<a name="line.686"></a>
-<span class="sourceLineNo">687</span>     * @return this file info object<a name="line.687"></a>
-<span class="sourceLineNo">688</span>     * @throws IOException if the key or value is invalid<a name="line.688"></a>
-<span class="sourceLineNo">689</span>     */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        final boolean checkPrefix) throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (k == null || v == null) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        throw new NullPointerException("Key nor value may be null");<a name="line.693"></a>
-<span class="sourceLineNo">694</span>      }<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.696"></a>
-<span class="sourceLineNo">697</span>            + " are reserved");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      }<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      put(k, v);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return this;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    public void clear() {<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.map.clear();<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return map.comparator();<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
-<span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public boolean containsKey(Object key) {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      return map.containsKey(key);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public boolean containsValue(Object value) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return map.containsValue(value);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      return map.entrySet();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>    @Override<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    public boolean equals(Object o) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      return map.equals(o);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span>    @Override<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    public byte[] firstKey() {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      return map.firstKey();<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    }<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    @Override<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    public byte[] get(Object key) {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      return map.get(key);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    @Override<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    public int hashCode() {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return map.hashCode();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    }<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>    @Override<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return this.map.headMap(toKey);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public boolean isEmpty() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      return map.isEmpty();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    @Override<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      return map.keySet();<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    }<a name="line.761"></a>
-<span class="sourceLineNo">762</span><a name="line.762"></a>
-<span class="sourceLineNo">763</span>    @Override<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    public byte[] lastKey() {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      return map.lastKey();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    }<a name="line.766"></a>
-<span class="sourceLineNo">767</span><a name="line.767"></a>
-<span class="sourceLineNo">768</span>    @Override<a name="line.768"></a>
-<span class="sourceLineNo">769</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      return this.map.put(key, value);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    }<a name="line.771"></a>
-<span class="sourceLineNo">772</span><a name="line.772"></a>
-<span class="sourceLineNo">773</span>    @Override<a name="line.773"></a>
-<span class="sourceLineNo">774</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      this.map.putAll(m);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public byte[] remove(Object key) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      return this.map.remove(key);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    }<a name="line.781"></a>
-<span class="sourceLineNo">782</span><a name="line.782"></a>
-<span class="sourceLineNo">783</span>    @Override<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    public int size() {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return map.size();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span><a name="line.787"></a>
-<span class="sourceLineNo">788</span>    @Override<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      return this.map.subMap(fromKey, toKey);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
-<span class="sourceLineNo">792</span><a name="line.792"></a>
-<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      return this.map.tailMap(fromKey);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
-<span class="sourceLineNo">797</span><a name="line.797"></a>
-<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    public Collection&lt;byte[]&gt; values() {<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return map.values();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
-<span class="sourceLineNo">802</span><a name="line.802"></a>
-<span class="sourceLineNo">803</span>    /**<a name="line.803"></a>
-<span class="sourceLineNo">804</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.804"></a>
-<span class="sourceLineNo">805</span>     * We write it as a protobuf.<a name="line.805"></a>
-<span class="sourceLineNo">806</span>     * @param out<a name="line.806"></a>
-<span class="sourceLineNo">807</span>     * @throws IOException<a name="line.807"></a>
-<span class="sourceLineNo">808</span>     * @see #read(DataInputStream)<a name="line.808"></a>
-<span class="sourceLineNo">809</span>     */<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    void write(final DataOutputStream out) throws IOException {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey()));<a name="line.814"></a>
-<span class="sourceLineNo">815</span>        bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue()));<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      }<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      builder.build().writeDelimitedTo(out);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>    /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.823"></a>
-<span class="sourceLineNo">824</span>     * Can deserialize protobuf of old Writables format.<a name="line.824"></a>
-<span class="sourceLineNo">825</span>     * @param in<a name="line.825"></a>
-<span class="sourceLineNo">826</span>     * @throws IOException<a name="line.826"></a>
-<span class="sourceLineNo">827</span>     * @see #write(DataOutputStream)<a name="line.827"></a>
-<span class="sourceLineNo">828</span>     */<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    void read(final DataInputStream in) throws IOException {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      byte [] pbuf = new byte[pblen];<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      if (in.markSupported()) in.mark(pblen);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int read = in.read(pbuf);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      } else {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>        if (in.markSupported()) {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>          in.reset();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          parseWritable(in);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        } else {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.843"></a>
-<span class="sourceLineNo">844</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.844"></a>
-<span class="sourceLineNo">845</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.845"></a>
-<span class="sourceLineNo">846</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          parseWritable(new DataInputStream(sis));<a name="line.849"></a>
-<span class="sourceLineNo">850</span>        }<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      }<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * @throws IOException<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      this.map.clear();<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      // Read the number of entries in the map<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      int entries = in.readInt();<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      // Then read each key/value pair<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>        byte [] key = Bytes.readByteArray(in);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.867"></a>
-<span class="sourceLineNo">868</span>        in.readByte();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        byte [] value = Bytes.readByteArray(in);<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        this.map.put(key, value);<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    }<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    /**<a name="line.874"></a>
-<span class="sourceLineNo">875</span>     * Fill our map with content of the pb we read off disk<a name="line.875"></a>
-<span class="sourceLineNo">876</span>     * @param fip protobuf message to read<a name="line.876"></a>
-<span class="sourceLineNo">877</span>     */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      this.map.clear();<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>  }<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.886"></a>
-<span class="sourceLineNo">887</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  /**<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * HFile.Writer.<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   *<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   * @return Array of strings, each represents a supported compression<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.896"></a>
-<span class="sourceLineNo">897</span>   *         supported.<a name="line.897"></a>
-<span class="sourceLineNo">898</span>   *         &lt;ul&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>   *         &lt;li&gt;"none" - No compression.<a name="line.899"></a>
-<span class="sourceLineNo">900</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>   *         &lt;/ul&gt;<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   */<a name="line.902"></a>
-<span class="sourceLineNo">903</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>    return Compression.getSupportedAlgorithms();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>  }<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>  // Utility methods.<a name="line.907"></a>
-<span class="sourceLineNo">908</span>  /*<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   * @param l Long to convert to an int.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   */<a name="line.911"></a>
-<span class="sourceLineNo">912</span>  static int longToInt(final long l) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>  }<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>  /**<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   * empty list.<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   *<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   * @param fs  The file system reference.<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param regionDir  The region directory to scan.<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   * @return The list of files found.<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   * @throws IOException When scanning the files fails.<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   */<a name="line.926"></a>
-<span class="sourceLineNo">927</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      throws IOException {<a name="line.928"></a>
-<span class="sourceLineNo">929</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;&gt;();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    for(FileStatus dir : familyDirs) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      for (FileStatus file : files) {<a name="line.934"></a>
-<span class="sourceLineNo">935</span>        if (!file.isDirectory() &amp;&amp;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.936"></a>
-<span class="sourceLineNo">937</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>          regionHFiles.add(file.getPath());<a name="line.938"></a>
-<span class="sourceLineNo">939</span>        }<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      }<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return regionHFiles;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * indicate that this is not a software error, but corrupted input.<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   *<a name="line.950"></a>
-<span class="sourceLineNo">951</span>   * @param version an HFile version<a name="line.951"></a>
-<span class="sourceLineNo">952</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.952"></a>
-<span class="sourceLineNo">953</span>   */<a name="line.953"></a>
-<span class="sourceLineNo">954</span>  public static void checkFormatVersion(int version)<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      throws IllegalArgumentException {<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.957"></a>
-<span class="sourceLineNo">958</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.958"></a>
-<span class="sourceLineNo">959</span>          + MAX_FORMAT_VERSION + ")");<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    }<a name="line.960"></a>
-<span class="sourceLineNo">961</span>  }<a name="line.961"></a>
-<span class="sourceLineNo">962</span><a name="line.962"></a>
-<span class="sourceLineNo">963</span><a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public static void checkHFileVersion(final Configuration c) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>    int version = c.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    if (version &lt; MAX_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>      throw new IllegalArgumentException("The setting for " + FORMAT_VERSION_KEY +<a name="line.967"></a>
-<span class="sourceLineNo">968</span>        " (in your hbase-*.xml files) is " + version + " which does not match " +<a name="line.968"></a>
-<span class="sourceLineNo">969</span>        MAX_FORMAT_VERSION +<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        "; are you running with a configuration from an older or newer hbase install (an " +<a name="line.970"></a>
-<span class="sourceLineNo">971</span>        "incompatible hbase-default.xml or hbase-site.xml on your CLASSPATH)?");<a name="line.971"></a>
-<span class="sourceLineNo">972</span>    }<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  }<a name="line.973"></a>
-<span class="sourceLineNo">974</span><a name="line.974"></a>
-<span class="sourceLineNo">975</span>  public static void main(String[] args) throws Exception {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    // delegate to preserve old behavior<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    HFilePrettyPrinter.main(args);<a name="line.977"></a>
-<span class="sourceLineNo">978</span>  }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>}<a name="line.979"></a>
+<span class="sourceLineNo">495</span>    @VisibleForTesting<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>    @VisibleForTesting<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    boolean prefetchComplete();<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>    /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>     * implementation should take care of thread safety.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>     */<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    void unbufferStream();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
+<span class="sourceLineNo">507</span><a name="line.507"></a>
+<span class="sourceLineNo">508</span>  /**<a name="line.508"></a>
+<span class="sourceLineNo">509</span>   * Method returns the reader given the specified arguments.<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   *<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param path hfile's path<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param fsdis stream of path's file<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param size max size of the trailer.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   * @param hfs<a name="line.516"></a>
+<span class="sourceLineNo">517</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.517"></a>
+<span class="sourceLineNo">518</span>   * @return an appropriate instance of HFileReader<a name="line.518"></a>
+<span class="sourceLineNo">519</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   */<a name="line.520"></a>
+<span class="sourceLineNo">521</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      justification="Intentional")<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      throws IOException {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    FixedFileTrailer trailer = null;<a name="line.526"></a>
+<span class="sourceLineNo">527</span>    try {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.528"></a>
+<span class="sourceLineNo">529</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      switch (trailer.getMajorVersion()) {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        case 2:<a name="line.532"></a>
+<span class="sourceLineNo">533</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.533"></a>
+<span class="sourceLineNo">534</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        case 3:<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              primaryReplicaReader, conf);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        default:<a name="line.538"></a>
+<span class="sourceLineNo">539</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    } catch (Throwable t) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      IOUtils.closeQuietly(fsdis);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    } finally {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      fsdis.unbuffer();<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
+<span class="sourceLineNo">550</span>   * The sockets and the file descriptors held by the method parameter<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * that no other threads have access to the same passed reference.<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   * @param fs A file system<a name="line.553"></a>
+<span class="sourceLineNo">554</span>   * @param path Path to HFile<a name="line.554"></a>
+<span class="sourceLineNo">555</span>   * @param fsdis a stream of path's file<a name="line.555"></a>
+<span class="sourceLineNo">556</span>   * @param size max size of the trailer.<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param conf Configuration<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return A version specific Hfile Reader<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      throws IOException {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    HFileSystem hfs = null;<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    // the filesystem.<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    if (!(fs instanceof HFileSystem)) {<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      hfs = new HFileSystem(fs);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    } else {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      hfs = (HFileSystem) fs;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.577"></a>
+<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
+<span class="sourceLineNo">579</span><a name="line.579"></a>
+<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
+<span class="sourceLineNo">581</span>  * Creates reader with cache configuration disabled<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  * @param fs filesystem<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  * @param path Path to file to read<a name="line.583"></a>
+<span class="sourceLineNo">584</span>  * @return an active Reader instance<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>  */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>  }<a name="line.593"></a>
+<span class="sourceLineNo">594</span><a name="line.594"></a>
+<span class="sourceLineNo">595</span>  /**<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   * @param fs filesystem<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   * @param path Path to file to read<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * @param cacheConf This must not be null. @see<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * @return an active Reader instance<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   *           is corrupt/invalid.<a name="line.603"></a>
+<span class="sourceLineNo">604</span>   */<a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * The sockets and the file descriptors held by the method parameter<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * that no other threads have access to the same passed reference.<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
+<span class="sourceLineNo">619</span>  @VisibleForTesting<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * @param fs filesystem<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   * @param path Path to file to verify<a name="line.629"></a>
+<span class="sourceLineNo">630</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.630"></a>
+<span class="sourceLineNo">631</span>   * @throws IOException if failed to read from the underlying stream<a name="line.631"></a>
+<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
+<span class="sourceLineNo">633</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  /**<a name="line.637"></a>
+<span class="sourceLineNo">638</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * @param fs filesystem<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * @param fileStatus the file to verify<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @throws IOException if failed to read from the underlying stream<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throws IOException {<a name="line.645"></a>
+<span class="sourceLin

<TRUNCATED>

[30/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
index 2559531..06869f5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
@@ -364,627 +364,625 @@
 <span class="sourceLineNo">356</span>   */<a name="line.356"></a>
 <span class="sourceLineNo">357</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.357"></a>
 <span class="sourceLineNo">358</span>       conf) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    Configuration tempConf = new Configuration(conf);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      CacheConfig cacheConf) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    int version = getFormatVersion(conf);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    switch (version) {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    case 2:<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.373"></a>
-<span class="sourceLineNo">374</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        "in hbase-site.xml)");<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    case 3:<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    default:<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          "format version " + version);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    }<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /**<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * An abstraction used by the block index.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public interface CachingBlockReader {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     * Read in a file block.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>     * @param offset offset to read.<a name="line.392"></a>
-<span class="sourceLineNo">393</span>     * @param onDiskBlockSize size of the block<a name="line.393"></a>
-<span class="sourceLineNo">394</span>     * @param cacheBlock<a name="line.394"></a>
-<span class="sourceLineNo">395</span>     * @param pread<a name="line.395"></a>
-<span class="sourceLineNo">396</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.396"></a>
-<span class="sourceLineNo">397</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     *  caching efficiency of encoded data blocks)<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.401"></a>
-<span class="sourceLineNo">402</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.402"></a>
-<span class="sourceLineNo">403</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>     * @return Block wrapped in a ByteBuffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>     * @throws IOException<a name="line.405"></a>
-<span class="sourceLineNo">406</span>     */<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        throws IOException;<a name="line.411"></a>
-<span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>    /**<a name="line.413"></a>
-<span class="sourceLineNo">414</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.414"></a>
-<span class="sourceLineNo">415</span>     * @param block Block to be returned.<a name="line.415"></a>
-<span class="sourceLineNo">416</span>     */<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    void returnBlock(HFileBlock block);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.423"></a>
-<span class="sourceLineNo">424</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.424"></a>
-<span class="sourceLineNo">425</span>     * write.<a name="line.425"></a>
-<span class="sourceLineNo">426</span>     */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    String getName();<a name="line.427"></a>
+<span class="sourceLineNo">359</span>    return HFile.getWriterFactory(conf, CacheConfig.DISABLED);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      CacheConfig cacheConf) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    int version = getFormatVersion(conf);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    switch (version) {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    case 2:<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.371"></a>
+<span class="sourceLineNo">372</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        "in hbase-site.xml)");<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    case 3:<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    default:<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          "format version " + version);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * An abstraction used by the block index.<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  public interface CachingBlockReader {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>     * Read in a file block.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>     * @param offset offset to read.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>     * @param onDiskBlockSize size of the block<a name="line.391"></a>
+<span class="sourceLineNo">392</span>     * @param cacheBlock<a name="line.392"></a>
+<span class="sourceLineNo">393</span>     * @param pread<a name="line.393"></a>
+<span class="sourceLineNo">394</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.394"></a>
+<span class="sourceLineNo">395</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.396"></a>
+<span class="sourceLineNo">397</span>     *  caching efficiency of encoded data blocks)<a name="line.397"></a>
+<span class="sourceLineNo">398</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.398"></a>
+<span class="sourceLineNo">399</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.399"></a>
+<span class="sourceLineNo">400</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.400"></a>
+<span class="sourceLineNo">401</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.401"></a>
+<span class="sourceLineNo">402</span>     * @return Block wrapped in a ByteBuffer.<a name="line.402"></a>
+<span class="sourceLineNo">403</span>     * @throws IOException<a name="line.403"></a>
+<span class="sourceLineNo">404</span>     */<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>        throws IOException;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>    /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>     * @param block Block to be returned.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>     */<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    void returnBlock(HFileBlock block);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.422"></a>
+<span class="sourceLineNo">423</span>     * write.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>     */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    String getName();<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    CellComparator getComparator();<a name="line.427"></a>
 <span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    CellComparator getComparator();<a name="line.429"></a>
+<span class="sourceLineNo">429</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.429"></a>
 <span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.431"></a>
+<span class="sourceLineNo">431</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.431"></a>
 <span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.433"></a>
+<span class="sourceLineNo">433</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.433"></a>
 <span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.435"></a>
+<span class="sourceLineNo">435</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.437"></a>
+<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.437"></a>
 <span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.439"></a>
+<span class="sourceLineNo">439</span>    long length();<a name="line.439"></a>
 <span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>    long length();<a name="line.441"></a>
+<span class="sourceLineNo">441</span>    long getEntries();<a name="line.441"></a>
 <span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    long getEntries();<a name="line.443"></a>
+<span class="sourceLineNo">443</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.443"></a>
 <span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.445"></a>
+<span class="sourceLineNo">445</span>    long indexSize();<a name="line.445"></a>
 <span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>    long indexSize();<a name="line.447"></a>
+<span class="sourceLineNo">447</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.447"></a>
 <span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.449"></a>
+<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.449"></a>
 <span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.451"></a>
+<span class="sourceLineNo">451</span>    FixedFileTrailer getTrailer();<a name="line.451"></a>
 <span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>    FixedFileTrailer getTrailer();<a name="line.453"></a>
+<span class="sourceLineNo">453</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.453"></a>
 <span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.455"></a>
+<span class="sourceLineNo">455</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.455"></a>
 <span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.457"></a>
+<span class="sourceLineNo">457</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.457"></a>
 <span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    /**<a name="line.461"></a>
-<span class="sourceLineNo">462</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.462"></a>
-<span class="sourceLineNo">463</span>     * {@link HFile} version.<a name="line.463"></a>
-<span class="sourceLineNo">464</span>     * Knows nothing about how that metadata is structured.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>     */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    /**<a name="line.468"></a>
-<span class="sourceLineNo">469</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.469"></a>
-<span class="sourceLineNo">470</span>     * {@link HFile}  version.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>     * Knows nothing about how that metadata is structured.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>     */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.473"></a>
+<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
+<span class="sourceLineNo">460</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.460"></a>
+<span class="sourceLineNo">461</span>     * {@link HFile} version.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>     * Knows nothing about how that metadata is structured.<a name="line.462"></a>
+<span class="sourceLineNo">463</span>     */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>    /**<a name="line.466"></a>
+<span class="sourceLineNo">467</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.467"></a>
+<span class="sourceLineNo">468</span>     * {@link HFile}  version.<a name="line.468"></a>
+<span class="sourceLineNo">469</span>     * Knows nothing about how that metadata is structured.<a name="line.469"></a>
+<span class="sourceLineNo">470</span>     */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>    Path getPath();<a name="line.473"></a>
 <span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>    Path getPath();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    /** Close method with optional evictOnClose */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    void close(boolean evictOnClose) throws IOException;<a name="line.478"></a>
+<span class="sourceLineNo">475</span>    /** Close method with optional evictOnClose */<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    void close(boolean evictOnClose) throws IOException;<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.478"></a>
 <span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.480"></a>
+<span class="sourceLineNo">480</span>    boolean hasMVCCInfo();<a name="line.480"></a>
 <span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    boolean hasMVCCInfo();<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>     * Return the file context of the HFile this reader belongs to<a name="line.485"></a>
-<span class="sourceLineNo">486</span>     */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    HFileContext getFileContext();<a name="line.487"></a>
+<span class="sourceLineNo">482</span>    /**<a name="line.482"></a>
+<span class="sourceLineNo">483</span>     * Return the file context of the HFile this reader belongs to<a name="line.483"></a>
+<span class="sourceLineNo">484</span>     */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    HFileContext getFileContext();<a name="line.485"></a>
+<span class="sourceLineNo">486</span><a name="line.486"></a>
+<span class="sourceLineNo">487</span>    boolean isPrimaryReplicaReader();<a name="line.487"></a>
 <span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    boolean isPrimaryReplicaReader();<a name="line.489"></a>
+<span class="sourceLineNo">489</span>    boolean shouldIncludeMemStoreTS();<a name="line.489"></a>
 <span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>    boolean shouldIncludeMemStoreTS();<a name="line.491"></a>
+<span class="sourceLineNo">491</span>    boolean isDecodeMemStoreTS();<a name="line.491"></a>
 <span class="sourceLineNo">492</span><a name="line.492"></a>
-<span class="sourceLineNo">493</span>    boolean isDecodeMemStoreTS();<a name="line.493"></a>
+<span class="sourceLineNo">493</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.493"></a>
 <span class="sourceLineNo">494</span><a name="line.494"></a>
-<span class="sourceLineNo">495</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>    @VisibleForTesting<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>    @VisibleForTesting<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    boolean prefetchComplete();<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>    /**<a name="line.503"></a>
-<span class="sourceLineNo">504</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.504"></a>
-<span class="sourceLineNo">505</span>     * implementation should take care of thread safety.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>     */<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    void unbufferStream();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Method returns the reader given the specified arguments.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   *<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * @param path hfile's path<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * @param fsdis stream of path's file<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param size max size of the trailer.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param hfs<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @return an appropriate instance of HFileReader<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      justification="Intentional")<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    FixedFileTrailer trailer = null;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    try {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      switch (trailer.getMajorVersion()) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        case 2:<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.535"></a>
-<span class="sourceLineNo">536</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        case 3:<a name="line.537"></a>
-<span class="sourceLineNo">538</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.538"></a>
-<span class="sourceLineNo">539</span>              primaryReplicaReader, conf);<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        default:<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } catch (Throwable t) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      IOUtils.closeQuietly(fsdis);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    } finally {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      fsdis.unbuffer();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>  }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>  /**<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * The sockets and the file descriptors held by the method parameter<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * that no other threads have access to the same passed reference.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param fs A file system<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @param path Path to HFile<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * @param fsdis a stream of path's file<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   * @param size max size of the trailer.<a name="line.558"></a>
-<span class="sourceLineNo">559</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.559"></a>
-<span class="sourceLineNo">560</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.560"></a>
-<span class="sourceLineNo">561</span>   * @param conf Configuration<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @return A version specific Hfile Reader<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      throws IOException {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    HFileSystem hfs = null;<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    // the filesystem.<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (!(fs instanceof HFileSystem)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      hfs = new HFileSystem(fs);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    } else {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      hfs = (HFileSystem) fs;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    }<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
-<span class="sourceLineNo">581</span><a name="line.581"></a>
-<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
-<span class="sourceLineNo">583</span>  * Creates reader with cache configuration disabled<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  * @param fs filesystem<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  * @param path Path to file to read<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  * @return an active Reader instance<a name="line.586"></a>
-<span class="sourceLineNo">587</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  */<a name="line.589"></a>
-<span class="sourceLineNo">590</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      throws IOException {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.592"></a>
-<span class="sourceLineNo">593</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>  }<a name="line.595"></a>
-<span class="sourceLineNo">596</span><a name="line.596"></a>
-<span class="sourceLineNo">597</span>  /**<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   * @param fs filesystem<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   * @param path Path to file to read<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * @param cacheConf This must not be null. @see<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   * @return an active Reader instance<a name="line.603"></a>
-<span class="sourceLineNo">604</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.604"></a>
-<span class="sourceLineNo">605</span>   *           is corrupt/invalid.<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   */<a name="line.606"></a>
-<span class="sourceLineNo">607</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>  }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>  /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * The sockets and the file descriptors held by the method parameter<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.618"></a>
-<span class="sourceLineNo">619</span>   * that no other threads have access to the same passed reference.<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
-<span class="sourceLineNo">622</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.625"></a>
-<span class="sourceLineNo">626</span>  }<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>  /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @param fs filesystem<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * @param path Path to file to verify<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * @throws IOException if failed to read from the underlying stream<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   */<a name="line.634"></a>
-<span class="sourceLineNo">635</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>  }<a name="line.637"></a>
-<span class="sourceLineNo">638</span><a name="line.638"></a>
-<span class="sourceLineNo">639</span>  /**<a name="line.639"></a>
-<span class="sourceLineNo">640</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>   * @param fs filesystem<a name="line.641"></a>
-<span class="sourceLineNo">642</span>   * @param fileStatus the file to verify<a name="line.642"></a>
-<span class="sourceLineNo">643</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.643"></a>
-<span class="sourceLineNo">644</span>   * @throws IOException if failed to read from the underlying stream<a name="line.644"></a>
-<span class="sourceLineNo">645</span>   */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    final Path path = fileStatus.getPath();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    final long size = fileStatus.getLen();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try (FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path)) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      return true;<a name="line.654"></a>
-<span class="sourceLineNo">655</span>    } catch (IllegalArgumentException e) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return false;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
-<span class="sourceLineNo">661</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.661"></a>
-<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public FileInfo() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      super();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.680"></a>
-<span class="sourceLineNo">681</span>     * key prefix.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>     *<a name="line.682"></a>
-<span class="sourceLineNo">683</span>     * @param k key to add<a name="line.683"></a>
-<span class="sourceLineNo">684</span>     * @param v value to add<a name="line.684"></a>
-<span class="sourceLineNo">685</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.685"></a>
-<span class="sourceLineNo">686</span>     *          with the reserved prefix<a name="line.686"></a>
-<span class="sourceLineNo">687</span>     * @return this file info object<a name="line.687"></a>
-<span class="sourceLineNo">688</span>     * @throws IOException if the key or value is invalid<a name="line.688"></a>
-<span class="sourceLineNo">689</span>     */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        final boolean checkPrefix) throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (k == null || v == null) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        throw new NullPointerException("Key nor value may be null");<a name="line.693"></a>
-<span class="sourceLineNo">694</span>      }<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.696"></a>
-<span class="sourceLineNo">697</span>            + " are reserved");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      }<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      put(k, v);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return this;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    public void clear() {<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.map.clear();<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return map.comparator();<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
-<span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public boolean containsKey(Object key) {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      return map.containsKey(key);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public boolean containsValue(Object value) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return map.containsValue(value);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      return map.entrySet();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>    @Override<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    public boolean equals(Object o) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      return map.equals(o);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span>    @Override<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    public byte[] firstKey() {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      return map.firstKey();<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    }<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    @Override<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    public byte[] get(Object key) {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      return map.get(key);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    @Override<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    public int hashCode() {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return map.hashCode();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    }<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>    @Override<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return this.map.headMap(toKey);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public boolean isEmpty() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      return map.isEmpty();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    @Override<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      return map.keySet();<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    }<a name="line.761"></a>
-<span class="sourceLineNo">762</span><a name="line.762"></a>
-<span class="sourceLineNo">763</span>    @Override<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    public byte[] lastKey() {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      return map.lastKey();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    }<a name="line.766"></a>
-<span class="sourceLineNo">767</span><a name="line.767"></a>
-<span class="sourceLineNo">768</span>    @Override<a name="line.768"></a>
-<span class="sourceLineNo">769</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      return this.map.put(key, value);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    }<a name="line.771"></a>
-<span class="sourceLineNo">772</span><a name="line.772"></a>
-<span class="sourceLineNo">773</span>    @Override<a name="line.773"></a>
-<span class="sourceLineNo">774</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      this.map.putAll(m);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public byte[] remove(Object key) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      return this.map.remove(key);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    }<a name="line.781"></a>
-<span class="sourceLineNo">782</span><a name="line.782"></a>
-<span class="sourceLineNo">783</span>    @Override<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    public int size() {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return map.size();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span><a name="line.787"></a>
-<span class="sourceLineNo">788</span>    @Override<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      return this.map.subMap(fromKey, toKey);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
-<span class="sourceLineNo">792</span><a name="line.792"></a>
-<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      return this.map.tailMap(fromKey);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
-<span class="sourceLineNo">797</span><a name="line.797"></a>
-<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    public Collection&lt;byte[]&gt; values() {<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return map.values();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
-<span class="sourceLineNo">802</span><a name="line.802"></a>
-<span class="sourceLineNo">803</span>    /**<a name="line.803"></a>
-<span class="sourceLineNo">804</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.804"></a>
-<span class="sourceLineNo">805</span>     * We write it as a protobuf.<a name="line.805"></a>
-<span class="sourceLineNo">806</span>     * @param out<a name="line.806"></a>
-<span class="sourceLineNo">807</span>     * @throws IOException<a name="line.807"></a>
-<span class="sourceLineNo">808</span>     * @see #read(DataInputStream)<a name="line.808"></a>
-<span class="sourceLineNo">809</span>     */<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    void write(final DataOutputStream out) throws IOException {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey()));<a name="line.814"></a>
-<span class="sourceLineNo">815</span>        bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue()));<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      }<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      builder.build().writeDelimitedTo(out);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>    /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.823"></a>
-<span class="sourceLineNo">824</span>     * Can deserialize protobuf of old Writables format.<a name="line.824"></a>
-<span class="sourceLineNo">825</span>     * @param in<a name="line.825"></a>
-<span class="sourceLineNo">826</span>     * @throws IOException<a name="line.826"></a>
-<span class="sourceLineNo">827</span>     * @see #write(DataOutputStream)<a name="line.827"></a>
-<span class="sourceLineNo">828</span>     */<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    void read(final DataInputStream in) throws IOException {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      byte [] pbuf = new byte[pblen];<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      if (in.markSupported()) in.mark(pblen);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int read = in.read(pbuf);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      } else {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>        if (in.markSupported()) {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>          in.reset();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          parseWritable(in);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        } else {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.843"></a>
-<span class="sourceLineNo">844</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.844"></a>
-<span class="sourceLineNo">845</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.845"></a>
-<span class="sourceLineNo">846</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          parseWritable(new DataInputStream(sis));<a name="line.849"></a>
-<span class="sourceLineNo">850</span>        }<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      }<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * @throws IOException<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      this.map.clear();<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      // Read the number of entries in the map<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      int entries = in.readInt();<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      // Then read each key/value pair<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>        byte [] key = Bytes.readByteArray(in);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.867"></a>
-<span class="sourceLineNo">868</span>        in.readByte();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        byte [] value = Bytes.readByteArray(in);<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        this.map.put(key, value);<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    }<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    /**<a name="line.874"></a>
-<span class="sourceLineNo">875</span>     * Fill our map with content of the pb we read off disk<a name="line.875"></a>
-<span class="sourceLineNo">876</span>     * @param fip protobuf message to read<a name="line.876"></a>
-<span class="sourceLineNo">877</span>     */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      this.map.clear();<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>  }<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.886"></a>
-<span class="sourceLineNo">887</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  /**<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * HFile.Writer.<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   *<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   * @return Array of strings, each represents a supported compression<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.896"></a>
-<span class="sourceLineNo">897</span>   *         supported.<a name="line.897"></a>
-<span class="sourceLineNo">898</span>   *         &lt;ul&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>   *         &lt;li&gt;"none" - No compression.<a name="line.899"></a>
-<span class="sourceLineNo">900</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>   *         &lt;/ul&gt;<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   */<a name="line.902"></a>
-<span class="sourceLineNo">903</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>    return Compression.getSupportedAlgorithms();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>  }<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>  // Utility methods.<a name="line.907"></a>
-<span class="sourceLineNo">908</span>  /*<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   * @param l Long to convert to an int.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   */<a name="line.911"></a>
-<span class="sourceLineNo">912</span>  static int longToInt(final long l) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>  }<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>  /**<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   * empty list.<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   *<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   * @param fs  The file system reference.<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param regionDir  The region directory to scan.<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   * @return The list of files found.<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   * @throws IOException When scanning the files fails.<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   */<a name="line.926"></a>
-<span class="sourceLineNo">927</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      throws IOException {<a name="line.928"></a>
-<span class="sourceLineNo">929</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;&gt;();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    for(FileStatus dir : familyDirs) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      for (FileStatus file : files) {<a name="line.934"></a>
-<span class="sourceLineNo">935</span>        if (!file.isDirectory() &amp;&amp;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.936"></a>
-<span class="sourceLineNo">937</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>          regionHFiles.add(file.getPath());<a name="line.938"></a>
-<span class="sourceLineNo">939</span>        }<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      }<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return regionHFiles;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * indicate that this is not a software error, but corrupted input.<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   *<a name="line.950"></a>
-<span class="sourceLineNo">951</span>   * @param version an HFile version<a name="line.951"></a>
-<span class="sourceLineNo">952</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.952"></a>
-<span class="sourceLineNo">953</span>   */<a name="line.953"></a>
-<span class="sourceLineNo">954</span>  public static void checkFormatVersion(int version)<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      throws IllegalArgumentException {<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.957"></a>
-<span class="sourceLineNo">958</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.958"></a>
-<span class="sourceLineNo">959</span>          + MAX_FORMAT_VERSION + ")");<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    }<a name="line.960"></a>
-<span class="sourceLineNo">961</span>  }<a name="line.961"></a>
-<span class="sourceLineNo">962</span><a name="line.962"></a>
-<span class="sourceLineNo">963</span><a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public static void checkHFileVersion(final Configuration c) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>    int version = c.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    if (version &lt; MAX_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>      throw new IllegalArgumentException("The setting for " + FORMAT_VERSION_KEY +<a name="line.967"></a>
-<span class="sourceLineNo">968</span>        " (in your hbase-*.xml files) is " + version + " which does not match " +<a name="line.968"></a>
-<span class="sourceLineNo">969</span>        MAX_FORMAT_VERSION +<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        "; are you running with a configuration from an older or newer hbase install (an " +<a name="line.970"></a>
-<span class="sourceLineNo">971</span>        "incompatible hbase-default.xml or hbase-site.xml on your CLASSPATH)?");<a name="line.971"></a>
-<span class="sourceLineNo">972</span>    }<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  }<a name="line.973"></a>
-<span class="sourceLineNo">974</span><a name="line.974"></a>
-<span class="sourceLineNo">975</span>  public static void main(String[] args) throws Exception {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    // delegate to preserve old behavior<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    HFilePrettyPrinter.main(args);<a name="line.977"></a>
-<span class="sourceLineNo">978</span>  }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>}<a name="line.979"></a>
+<span class="sourceLineNo">495</span>    @VisibleForTesting<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>    @VisibleForTesting<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    boolean prefetchComplete();<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>    /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>     * implementation should take care of thread safety.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>     */<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    void unbufferStream();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
+<span class="sourceLineNo">507</span><a name="line.507"></a>
+<span class="sourceLineNo">508</span>  /**<a name="line.508"></a>
+<span class="sourceLineNo">509</span>   * Method returns the reader given the specified arguments.<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   *<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param path hfile's path<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param fsdis stream of path's file<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param size max size of the trailer.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   * @param hfs<a name="line.516"></a>
+<span class="sourceLineNo">517</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.517"></a>
+<span class="sourceLineNo">518</span>   * @return an appropriate instance of HFileReader<a name="line.518"></a>
+<span class="sourceLineNo">519</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   */<a name="line.520"></a>
+<span class="sourceLineNo">521</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      justification="Intentional")<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      throws IOException {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    FixedFileTrailer trailer = null;<a name="line.526"></a>
+<span class="sourceLineNo">527</span>    try {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.528"></a>
+<span class="sourceLineNo">529</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      switch (trailer.getMajorVersion()) {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        case 2:<a name="line.532"></a>
+<span class="sourceLineNo">533</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.533"></a>
+<span class="sourceLineNo">534</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        case 3:<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              primaryReplicaReader, conf);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        default:<a name="line.538"></a>
+<span class="sourceLineNo">539</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    } catch (Throwable t) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      IOUtils.closeQuietly(fsdis);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    } finally {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      fsdis.unbuffer();<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
+<span class="sourceLineNo">550</span>   * The sockets and the file descriptors held by the method parameter<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * that no other threads have access to the same passed reference.<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   * @param fs A file system<a name="line.553"></a>
+<span class="sourceLineNo">554</span>   * @param path Path to HFile<a name="line.554"></a>
+<span class="sourceLineNo">555</span>   * @param fsdis a stream of path's file<a name="line.555"></a>
+<span class="sourceLineNo">556</span>   * @param size max size of the trailer.<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param conf Configuration<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return A version specific Hfile Reader<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      throws IOException {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    HFileSystem hfs = null;<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    // the filesystem.<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    if (!(fs instanceof HFileSystem)) {<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      hfs = new HFileSystem(fs);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    } else {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      hfs = (HFileSystem) fs;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.577"></a>
+<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
+<span class="sourceLineNo">579</span><a name="line.579"></a>
+<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
+<span class="sourceLineNo">581</span>  * Creates reader with cache configuration disabled<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  * @param fs filesystem<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  * @param path Path to file to read<a name="line.583"></a>
+<span class="sourceLineNo">584</span>  * @return an active Reader instance<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>  */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>  }<a name="line.593"></a>
+<span class="sourceLineNo">594</span><a name="line.594"></a>
+<span class="sourceLineNo">595</span>  /**<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   * @param fs filesystem<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   * @param path Path to file to read<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * @param cacheConf This must not be null. @see<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * @return an active Reader instance<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   *           is corrupt/invalid.<a name="line.603"></a>
+<span class="sourceLineNo">604</span>   */<a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * The sockets and the file descriptors held by the method parameter<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * that no other threads have access to the same passed reference.<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
+<span class="sourceLineNo">619</span>  @VisibleForTesting<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * @param fs filesystem<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   * @param path Path to file to verify<a name="line.629"></a>
+<span class="sourceLineNo">630</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.630"></a>
+<span class="sourceLineNo">631</span>   * @throws IOException if failed to read from the underlying stream<a name="line.631"></a>
+<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
+<span class="sourceLineNo">633</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  /**<a name="line.637"></a>
+<span class="sourceLineNo">638</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * @param fs filesystem<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * @param fileStatus the file to verify<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @throws IOException if failed to read from the underlying stream<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throws IOException {<a name="line.645"></a>
+<span class="sourceLineNo">646</

<TRUNCATED>

[27/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
index 2559531..06869f5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.html
@@ -364,627 +364,625 @@
 <span class="sourceLineNo">356</span>   */<a name="line.356"></a>
 <span class="sourceLineNo">357</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.357"></a>
 <span class="sourceLineNo">358</span>       conf) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    Configuration tempConf = new Configuration(conf);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      CacheConfig cacheConf) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    int version = getFormatVersion(conf);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    switch (version) {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    case 2:<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.373"></a>
-<span class="sourceLineNo">374</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        "in hbase-site.xml)");<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    case 3:<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    default:<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          "format version " + version);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    }<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /**<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * An abstraction used by the block index.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public interface CachingBlockReader {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     * Read in a file block.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>     * @param offset offset to read.<a name="line.392"></a>
-<span class="sourceLineNo">393</span>     * @param onDiskBlockSize size of the block<a name="line.393"></a>
-<span class="sourceLineNo">394</span>     * @param cacheBlock<a name="line.394"></a>
-<span class="sourceLineNo">395</span>     * @param pread<a name="line.395"></a>
-<span class="sourceLineNo">396</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.396"></a>
-<span class="sourceLineNo">397</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     *  caching efficiency of encoded data blocks)<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.401"></a>
-<span class="sourceLineNo">402</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.402"></a>
-<span class="sourceLineNo">403</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>     * @return Block wrapped in a ByteBuffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>     * @throws IOException<a name="line.405"></a>
-<span class="sourceLineNo">406</span>     */<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        throws IOException;<a name="line.411"></a>
-<span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>    /**<a name="line.413"></a>
-<span class="sourceLineNo">414</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.414"></a>
-<span class="sourceLineNo">415</span>     * @param block Block to be returned.<a name="line.415"></a>
-<span class="sourceLineNo">416</span>     */<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    void returnBlock(HFileBlock block);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.423"></a>
-<span class="sourceLineNo">424</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.424"></a>
-<span class="sourceLineNo">425</span>     * write.<a name="line.425"></a>
-<span class="sourceLineNo">426</span>     */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    String getName();<a name="line.427"></a>
+<span class="sourceLineNo">359</span>    return HFile.getWriterFactory(conf, CacheConfig.DISABLED);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      CacheConfig cacheConf) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    int version = getFormatVersion(conf);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    switch (version) {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    case 2:<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.371"></a>
+<span class="sourceLineNo">372</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        "in hbase-site.xml)");<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    case 3:<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    default:<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          "format version " + version);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * An abstraction used by the block index.<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  public interface CachingBlockReader {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>     * Read in a file block.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>     * @param offset offset to read.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>     * @param onDiskBlockSize size of the block<a name="line.391"></a>
+<span class="sourceLineNo">392</span>     * @param cacheBlock<a name="line.392"></a>
+<span class="sourceLineNo">393</span>     * @param pread<a name="line.393"></a>
+<span class="sourceLineNo">394</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.394"></a>
+<span class="sourceLineNo">395</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.396"></a>
+<span class="sourceLineNo">397</span>     *  caching efficiency of encoded data blocks)<a name="line.397"></a>
+<span class="sourceLineNo">398</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.398"></a>
+<span class="sourceLineNo">399</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.399"></a>
+<span class="sourceLineNo">400</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.400"></a>
+<span class="sourceLineNo">401</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.401"></a>
+<span class="sourceLineNo">402</span>     * @return Block wrapped in a ByteBuffer.<a name="line.402"></a>
+<span class="sourceLineNo">403</span>     * @throws IOException<a name="line.403"></a>
+<span class="sourceLineNo">404</span>     */<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>        throws IOException;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>    /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>     * @param block Block to be returned.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>     */<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    void returnBlock(HFileBlock block);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.422"></a>
+<span class="sourceLineNo">423</span>     * write.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>     */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    String getName();<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    CellComparator getComparator();<a name="line.427"></a>
 <span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    CellComparator getComparator();<a name="line.429"></a>
+<span class="sourceLineNo">429</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.429"></a>
 <span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.431"></a>
+<span class="sourceLineNo">431</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.431"></a>
 <span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.433"></a>
+<span class="sourceLineNo">433</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.433"></a>
 <span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.435"></a>
+<span class="sourceLineNo">435</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.437"></a>
+<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.437"></a>
 <span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.439"></a>
+<span class="sourceLineNo">439</span>    long length();<a name="line.439"></a>
 <span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>    long length();<a name="line.441"></a>
+<span class="sourceLineNo">441</span>    long getEntries();<a name="line.441"></a>
 <span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    long getEntries();<a name="line.443"></a>
+<span class="sourceLineNo">443</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.443"></a>
 <span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.445"></a>
+<span class="sourceLineNo">445</span>    long indexSize();<a name="line.445"></a>
 <span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>    long indexSize();<a name="line.447"></a>
+<span class="sourceLineNo">447</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.447"></a>
 <span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.449"></a>
+<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.449"></a>
 <span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.451"></a>
+<span class="sourceLineNo">451</span>    FixedFileTrailer getTrailer();<a name="line.451"></a>
 <span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>    FixedFileTrailer getTrailer();<a name="line.453"></a>
+<span class="sourceLineNo">453</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.453"></a>
 <span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.455"></a>
+<span class="sourceLineNo">455</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.455"></a>
 <span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.457"></a>
+<span class="sourceLineNo">457</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.457"></a>
 <span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    /**<a name="line.461"></a>
-<span class="sourceLineNo">462</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.462"></a>
-<span class="sourceLineNo">463</span>     * {@link HFile} version.<a name="line.463"></a>
-<span class="sourceLineNo">464</span>     * Knows nothing about how that metadata is structured.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>     */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    /**<a name="line.468"></a>
-<span class="sourceLineNo">469</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.469"></a>
-<span class="sourceLineNo">470</span>     * {@link HFile}  version.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>     * Knows nothing about how that metadata is structured.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>     */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.473"></a>
+<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
+<span class="sourceLineNo">460</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.460"></a>
+<span class="sourceLineNo">461</span>     * {@link HFile} version.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>     * Knows nothing about how that metadata is structured.<a name="line.462"></a>
+<span class="sourceLineNo">463</span>     */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>    /**<a name="line.466"></a>
+<span class="sourceLineNo">467</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.467"></a>
+<span class="sourceLineNo">468</span>     * {@link HFile}  version.<a name="line.468"></a>
+<span class="sourceLineNo">469</span>     * Knows nothing about how that metadata is structured.<a name="line.469"></a>
+<span class="sourceLineNo">470</span>     */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>    Path getPath();<a name="line.473"></a>
 <span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>    Path getPath();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    /** Close method with optional evictOnClose */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    void close(boolean evictOnClose) throws IOException;<a name="line.478"></a>
+<span class="sourceLineNo">475</span>    /** Close method with optional evictOnClose */<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    void close(boolean evictOnClose) throws IOException;<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.478"></a>
 <span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.480"></a>
+<span class="sourceLineNo">480</span>    boolean hasMVCCInfo();<a name="line.480"></a>
 <span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    boolean hasMVCCInfo();<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>     * Return the file context of the HFile this reader belongs to<a name="line.485"></a>
-<span class="sourceLineNo">486</span>     */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    HFileContext getFileContext();<a name="line.487"></a>
+<span class="sourceLineNo">482</span>    /**<a name="line.482"></a>
+<span class="sourceLineNo">483</span>     * Return the file context of the HFile this reader belongs to<a name="line.483"></a>
+<span class="sourceLineNo">484</span>     */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    HFileContext getFileContext();<a name="line.485"></a>
+<span class="sourceLineNo">486</span><a name="line.486"></a>
+<span class="sourceLineNo">487</span>    boolean isPrimaryReplicaReader();<a name="line.487"></a>
 <span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    boolean isPrimaryReplicaReader();<a name="line.489"></a>
+<span class="sourceLineNo">489</span>    boolean shouldIncludeMemStoreTS();<a name="line.489"></a>
 <span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>    boolean shouldIncludeMemStoreTS();<a name="line.491"></a>
+<span class="sourceLineNo">491</span>    boolean isDecodeMemStoreTS();<a name="line.491"></a>
 <span class="sourceLineNo">492</span><a name="line.492"></a>
-<span class="sourceLineNo">493</span>    boolean isDecodeMemStoreTS();<a name="line.493"></a>
+<span class="sourceLineNo">493</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.493"></a>
 <span class="sourceLineNo">494</span><a name="line.494"></a>
-<span class="sourceLineNo">495</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>    @VisibleForTesting<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>    @VisibleForTesting<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    boolean prefetchComplete();<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>    /**<a name="line.503"></a>
-<span class="sourceLineNo">504</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.504"></a>
-<span class="sourceLineNo">505</span>     * implementation should take care of thread safety.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>     */<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    void unbufferStream();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Method returns the reader given the specified arguments.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   *<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * @param path hfile's path<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * @param fsdis stream of path's file<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param size max size of the trailer.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param hfs<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @return an appropriate instance of HFileReader<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      justification="Intentional")<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    FixedFileTrailer trailer = null;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    try {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      switch (trailer.getMajorVersion()) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        case 2:<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.535"></a>
-<span class="sourceLineNo">536</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        case 3:<a name="line.537"></a>
-<span class="sourceLineNo">538</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.538"></a>
-<span class="sourceLineNo">539</span>              primaryReplicaReader, conf);<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        default:<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } catch (Throwable t) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      IOUtils.closeQuietly(fsdis);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    } finally {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      fsdis.unbuffer();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>  }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>  /**<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * The sockets and the file descriptors held by the method parameter<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * that no other threads have access to the same passed reference.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param fs A file system<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @param path Path to HFile<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * @param fsdis a stream of path's file<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   * @param size max size of the trailer.<a name="line.558"></a>
-<span class="sourceLineNo">559</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.559"></a>
-<span class="sourceLineNo">560</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.560"></a>
-<span class="sourceLineNo">561</span>   * @param conf Configuration<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @return A version specific Hfile Reader<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      throws IOException {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    HFileSystem hfs = null;<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    // the filesystem.<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (!(fs instanceof HFileSystem)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      hfs = new HFileSystem(fs);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    } else {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      hfs = (HFileSystem) fs;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    }<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
-<span class="sourceLineNo">581</span><a name="line.581"></a>
-<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
-<span class="sourceLineNo">583</span>  * Creates reader with cache configuration disabled<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  * @param fs filesystem<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  * @param path Path to file to read<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  * @return an active Reader instance<a name="line.586"></a>
-<span class="sourceLineNo">587</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  */<a name="line.589"></a>
-<span class="sourceLineNo">590</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      throws IOException {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.592"></a>
-<span class="sourceLineNo">593</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>  }<a name="line.595"></a>
-<span class="sourceLineNo">596</span><a name="line.596"></a>
-<span class="sourceLineNo">597</span>  /**<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   * @param fs filesystem<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   * @param path Path to file to read<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * @param cacheConf This must not be null. @see<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   * @return an active Reader instance<a name="line.603"></a>
-<span class="sourceLineNo">604</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.604"></a>
-<span class="sourceLineNo">605</span>   *           is corrupt/invalid.<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   */<a name="line.606"></a>
-<span class="sourceLineNo">607</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>  }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>  /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * The sockets and the file descriptors held by the method parameter<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.618"></a>
-<span class="sourceLineNo">619</span>   * that no other threads have access to the same passed reference.<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
-<span class="sourceLineNo">622</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.625"></a>
-<span class="sourceLineNo">626</span>  }<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>  /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @param fs filesystem<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * @param path Path to file to verify<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * @throws IOException if failed to read from the underlying stream<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   */<a name="line.634"></a>
-<span class="sourceLineNo">635</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>  }<a name="line.637"></a>
-<span class="sourceLineNo">638</span><a name="line.638"></a>
-<span class="sourceLineNo">639</span>  /**<a name="line.639"></a>
-<span class="sourceLineNo">640</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>   * @param fs filesystem<a name="line.641"></a>
-<span class="sourceLineNo">642</span>   * @param fileStatus the file to verify<a name="line.642"></a>
-<span class="sourceLineNo">643</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.643"></a>
-<span class="sourceLineNo">644</span>   * @throws IOException if failed to read from the underlying stream<a name="line.644"></a>
-<span class="sourceLineNo">645</span>   */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    final Path path = fileStatus.getPath();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    final long size = fileStatus.getLen();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try (FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path)) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      return true;<a name="line.654"></a>
-<span class="sourceLineNo">655</span>    } catch (IllegalArgumentException e) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return false;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
-<span class="sourceLineNo">661</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.661"></a>
-<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public FileInfo() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      super();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.680"></a>
-<span class="sourceLineNo">681</span>     * key prefix.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>     *<a name="line.682"></a>
-<span class="sourceLineNo">683</span>     * @param k key to add<a name="line.683"></a>
-<span class="sourceLineNo">684</span>     * @param v value to add<a name="line.684"></a>
-<span class="sourceLineNo">685</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.685"></a>
-<span class="sourceLineNo">686</span>     *          with the reserved prefix<a name="line.686"></a>
-<span class="sourceLineNo">687</span>     * @return this file info object<a name="line.687"></a>
-<span class="sourceLineNo">688</span>     * @throws IOException if the key or value is invalid<a name="line.688"></a>
-<span class="sourceLineNo">689</span>     */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        final boolean checkPrefix) throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (k == null || v == null) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        throw new NullPointerException("Key nor value may be null");<a name="line.693"></a>
-<span class="sourceLineNo">694</span>      }<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.696"></a>
-<span class="sourceLineNo">697</span>            + " are reserved");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      }<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      put(k, v);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return this;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    public void clear() {<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.map.clear();<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return map.comparator();<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
-<span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public boolean containsKey(Object key) {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      return map.containsKey(key);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public boolean containsValue(Object value) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return map.containsValue(value);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      return map.entrySet();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>    @Override<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    public boolean equals(Object o) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      return map.equals(o);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span>    @Override<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    public byte[] firstKey() {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      return map.firstKey();<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    }<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    @Override<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    public byte[] get(Object key) {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      return map.get(key);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    @Override<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    public int hashCode() {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return map.hashCode();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    }<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>    @Override<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return this.map.headMap(toKey);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public boolean isEmpty() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      return map.isEmpty();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    @Override<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      return map.keySet();<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    }<a name="line.761"></a>
-<span class="sourceLineNo">762</span><a name="line.762"></a>
-<span class="sourceLineNo">763</span>    @Override<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    public byte[] lastKey() {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      return map.lastKey();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    }<a name="line.766"></a>
-<span class="sourceLineNo">767</span><a name="line.767"></a>
-<span class="sourceLineNo">768</span>    @Override<a name="line.768"></a>
-<span class="sourceLineNo">769</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      return this.map.put(key, value);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    }<a name="line.771"></a>
-<span class="sourceLineNo">772</span><a name="line.772"></a>
-<span class="sourceLineNo">773</span>    @Override<a name="line.773"></a>
-<span class="sourceLineNo">774</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      this.map.putAll(m);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public byte[] remove(Object key) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      return this.map.remove(key);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    }<a name="line.781"></a>
-<span class="sourceLineNo">782</span><a name="line.782"></a>
-<span class="sourceLineNo">783</span>    @Override<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    public int size() {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return map.size();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span><a name="line.787"></a>
-<span class="sourceLineNo">788</span>    @Override<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      return this.map.subMap(fromKey, toKey);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
-<span class="sourceLineNo">792</span><a name="line.792"></a>
-<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      return this.map.tailMap(fromKey);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
-<span class="sourceLineNo">797</span><a name="line.797"></a>
-<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    public Collection&lt;byte[]&gt; values() {<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return map.values();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
-<span class="sourceLineNo">802</span><a name="line.802"></a>
-<span class="sourceLineNo">803</span>    /**<a name="line.803"></a>
-<span class="sourceLineNo">804</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.804"></a>
-<span class="sourceLineNo">805</span>     * We write it as a protobuf.<a name="line.805"></a>
-<span class="sourceLineNo">806</span>     * @param out<a name="line.806"></a>
-<span class="sourceLineNo">807</span>     * @throws IOException<a name="line.807"></a>
-<span class="sourceLineNo">808</span>     * @see #read(DataInputStream)<a name="line.808"></a>
-<span class="sourceLineNo">809</span>     */<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    void write(final DataOutputStream out) throws IOException {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey()));<a name="line.814"></a>
-<span class="sourceLineNo">815</span>        bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue()));<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      }<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      builder.build().writeDelimitedTo(out);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>    /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.823"></a>
-<span class="sourceLineNo">824</span>     * Can deserialize protobuf of old Writables format.<a name="line.824"></a>
-<span class="sourceLineNo">825</span>     * @param in<a name="line.825"></a>
-<span class="sourceLineNo">826</span>     * @throws IOException<a name="line.826"></a>
-<span class="sourceLineNo">827</span>     * @see #write(DataOutputStream)<a name="line.827"></a>
-<span class="sourceLineNo">828</span>     */<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    void read(final DataInputStream in) throws IOException {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      byte [] pbuf = new byte[pblen];<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      if (in.markSupported()) in.mark(pblen);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int read = in.read(pbuf);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      } else {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>        if (in.markSupported()) {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>          in.reset();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          parseWritable(in);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        } else {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.843"></a>
-<span class="sourceLineNo">844</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.844"></a>
-<span class="sourceLineNo">845</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.845"></a>
-<span class="sourceLineNo">846</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          parseWritable(new DataInputStream(sis));<a name="line.849"></a>
-<span class="sourceLineNo">850</span>        }<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      }<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * @throws IOException<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      this.map.clear();<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      // Read the number of entries in the map<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      int entries = in.readInt();<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      // Then read each key/value pair<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>        byte [] key = Bytes.readByteArray(in);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.867"></a>
-<span class="sourceLineNo">868</span>        in.readByte();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        byte [] value = Bytes.readByteArray(in);<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        this.map.put(key, value);<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    }<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    /**<a name="line.874"></a>
-<span class="sourceLineNo">875</span>     * Fill our map with content of the pb we read off disk<a name="line.875"></a>
-<span class="sourceLineNo">876</span>     * @param fip protobuf message to read<a name="line.876"></a>
-<span class="sourceLineNo">877</span>     */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      this.map.clear();<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>  }<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.886"></a>
-<span class="sourceLineNo">887</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  /**<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * HFile.Writer.<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   *<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   * @return Array of strings, each represents a supported compression<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.896"></a>
-<span class="sourceLineNo">897</span>   *         supported.<a name="line.897"></a>
-<span class="sourceLineNo">898</span>   *         &lt;ul&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>   *         &lt;li&gt;"none" - No compression.<a name="line.899"></a>
-<span class="sourceLineNo">900</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>   *         &lt;/ul&gt;<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   */<a name="line.902"></a>
-<span class="sourceLineNo">903</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>    return Compression.getSupportedAlgorithms();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>  }<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>  // Utility methods.<a name="line.907"></a>
-<span class="sourceLineNo">908</span>  /*<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   * @param l Long to convert to an int.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   */<a name="line.911"></a>
-<span class="sourceLineNo">912</span>  static int longToInt(final long l) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>  }<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>  /**<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   * empty list.<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   *<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   * @param fs  The file system reference.<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param regionDir  The region directory to scan.<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   * @return The list of files found.<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   * @throws IOException When scanning the files fails.<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   */<a name="line.926"></a>
-<span class="sourceLineNo">927</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      throws IOException {<a name="line.928"></a>
-<span class="sourceLineNo">929</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;&gt;();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    for(FileStatus dir : familyDirs) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      for (FileStatus file : files) {<a name="line.934"></a>
-<span class="sourceLineNo">935</span>        if (!file.isDirectory() &amp;&amp;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.936"></a>
-<span class="sourceLineNo">937</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>          regionHFiles.add(file.getPath());<a name="line.938"></a>
-<span class="sourceLineNo">939</span>        }<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      }<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return regionHFiles;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * indicate that this is not a software error, but corrupted input.<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   *<a name="line.950"></a>
-<span class="sourceLineNo">951</span>   * @param version an HFile version<a name="line.951"></a>
-<span class="sourceLineNo">952</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.952"></a>
-<span class="sourceLineNo">953</span>   */<a name="line.953"></a>
-<span class="sourceLineNo">954</span>  public static void checkFormatVersion(int version)<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      throws IllegalArgumentException {<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.957"></a>
-<span class="sourceLineNo">958</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.958"></a>
-<span class="sourceLineNo">959</span>          + MAX_FORMAT_VERSION + ")");<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    }<a name="line.960"></a>
-<span class="sourceLineNo">961</span>  }<a name="line.961"></a>
-<span class="sourceLineNo">962</span><a name="line.962"></a>
-<span class="sourceLineNo">963</span><a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public static void checkHFileVersion(final Configuration c) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>    int version = c.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    if (version &lt; MAX_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>      throw new IllegalArgumentException("The setting for " + FORMAT_VERSION_KEY +<a name="line.967"></a>
-<span class="sourceLineNo">968</span>        " (in your hbase-*.xml files) is " + version + " which does not match " +<a name="line.968"></a>
-<span class="sourceLineNo">969</span>        MAX_FORMAT_VERSION +<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        "; are you running with a configuration from an older or newer hbase install (an " +<a name="line.970"></a>
-<span class="sourceLineNo">971</span>        "incompatible hbase-default.xml or hbase-site.xml on your CLASSPATH)?");<a name="line.971"></a>
-<span class="sourceLineNo">972</span>    }<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  }<a name="line.973"></a>
-<span class="sourceLineNo">974</span><a name="line.974"></a>
-<span class="sourceLineNo">975</span>  public static void main(String[] args) throws Exception {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    // delegate to preserve old behavior<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    HFilePrettyPrinter.main(args);<a name="line.977"></a>
-<span class="sourceLineNo">978</span>  }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>}<a name="line.979"></a>
+<span class="sourceLineNo">495</span>    @VisibleForTesting<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>    @VisibleForTesting<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    boolean prefetchComplete();<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>    /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>     * implementation should take care of thread safety.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>     */<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    void unbufferStream();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
+<span class="sourceLineNo">507</span><a name="line.507"></a>
+<span class="sourceLineNo">508</span>  /**<a name="line.508"></a>
+<span class="sourceLineNo">509</span>   * Method returns the reader given the specified arguments.<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   *<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param path hfile's path<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param fsdis stream of path's file<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param size max size of the trailer.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   * @param hfs<a name="line.516"></a>
+<span class="sourceLineNo">517</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.517"></a>
+<span class="sourceLineNo">518</span>   * @return an appropriate instance of HFileReader<a name="line.518"></a>
+<span class="sourceLineNo">519</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   */<a name="line.520"></a>
+<span class="sourceLineNo">521</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      justification="Intentional")<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      throws IOException {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    FixedFileTrailer trailer = null;<a name="line.526"></a>
+<span class="sourceLineNo">527</span>    try {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.528"></a>
+<span class="sourceLineNo">529</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      switch (trailer.getMajorVersion()) {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        case 2:<a name="line.532"></a>
+<span class="sourceLineNo">533</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.533"></a>
+<span class="sourceLineNo">534</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        case 3:<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              primaryReplicaReader, conf);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        default:<a name="line.538"></a>
+<span class="sourceLineNo">539</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    } catch (Throwable t) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      IOUtils.closeQuietly(fsdis);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    } finally {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      fsdis.unbuffer();<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
+<span class="sourceLineNo">550</span>   * The sockets and the file descriptors held by the method parameter<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * that no other threads have access to the same passed reference.<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   * @param fs A file system<a name="line.553"></a>
+<span class="sourceLineNo">554</span>   * @param path Path to HFile<a name="line.554"></a>
+<span class="sourceLineNo">555</span>   * @param fsdis a stream of path's file<a name="line.555"></a>
+<span class="sourceLineNo">556</span>   * @param size max size of the trailer.<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param conf Configuration<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return A version specific Hfile Reader<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      throws IOException {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    HFileSystem hfs = null;<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    // the filesystem.<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    if (!(fs instanceof HFileSystem)) {<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      hfs = new HFileSystem(fs);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    } else {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      hfs = (HFileSystem) fs;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.577"></a>
+<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
+<span class="sourceLineNo">579</span><a name="line.579"></a>
+<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
+<span class="sourceLineNo">581</span>  * Creates reader with cache configuration disabled<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  * @param fs filesystem<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  * @param path Path to file to read<a name="line.583"></a>
+<span class="sourceLineNo">584</span>  * @return an active Reader instance<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>  */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>  }<a name="line.593"></a>
+<span class="sourceLineNo">594</span><a name="line.594"></a>
+<span class="sourceLineNo">595</span>  /**<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   * @param fs filesystem<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   * @param path Path to file to read<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * @param cacheConf This must not be null. @see<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * @return an active Reader instance<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   *           is corrupt/invalid.<a name="line.603"></a>
+<span class="sourceLineNo">604</span>   */<a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * The sockets and the file descriptors held by the method parameter<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * that no other threads have access to the same passed reference.<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
+<span class="sourceLineNo">619</span>  @VisibleForTesting<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * @param fs filesystem<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   * @param path Path to file to verify<a name="line.629"></a>
+<span class="sourceLineNo">630</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.630"></a>
+<span class="sourceLineNo">631</span>   * @throws IOException if failed to read from the underlying stream<a name="line.631"></a>
+<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
+<span class="sourceLineNo">633</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  /**<a name="line.637"></a>
+<span class="sourceLineNo">638</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * @param fs filesystem<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * @param fileStatus the file to verify<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @throws IOException if failed to read from the underlying stream<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throws IOException {<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    final Path path = fileStat

<TRUNCATED>

[07/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="line.1543"></a>
-<spa

<TRUNCATED>

[12/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.PrintingErrorReporter.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unl

<TRUNCATED>

[28/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
index 2559531..06869f5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html
@@ -364,627 +364,625 @@
 <span class="sourceLineNo">356</span>   */<a name="line.356"></a>
 <span class="sourceLineNo">357</span>  public static final WriterFactory getWriterFactoryNoCache(Configuration<a name="line.357"></a>
 <span class="sourceLineNo">358</span>       conf) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    Configuration tempConf = new Configuration(conf);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    return HFile.getWriterFactory(conf, new CacheConfig(tempConf));<a name="line.361"></a>
-<span class="sourceLineNo">362</span>  }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>  /**<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.365"></a>
-<span class="sourceLineNo">366</span>   */<a name="line.366"></a>
-<span class="sourceLineNo">367</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      CacheConfig cacheConf) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    int version = getFormatVersion(conf);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    switch (version) {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    case 2:<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.373"></a>
-<span class="sourceLineNo">374</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.374"></a>
-<span class="sourceLineNo">375</span>        "in hbase-site.xml)");<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    case 3:<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    default:<a name="line.378"></a>
-<span class="sourceLineNo">379</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.379"></a>
-<span class="sourceLineNo">380</span>          "format version " + version);<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    }<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  /**<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * An abstraction used by the block index.<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   */<a name="line.388"></a>
-<span class="sourceLineNo">389</span>  public interface CachingBlockReader {<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    /**<a name="line.390"></a>
-<span class="sourceLineNo">391</span>     * Read in a file block.<a name="line.391"></a>
-<span class="sourceLineNo">392</span>     * @param offset offset to read.<a name="line.392"></a>
-<span class="sourceLineNo">393</span>     * @param onDiskBlockSize size of the block<a name="line.393"></a>
-<span class="sourceLineNo">394</span>     * @param cacheBlock<a name="line.394"></a>
-<span class="sourceLineNo">395</span>     * @param pread<a name="line.395"></a>
-<span class="sourceLineNo">396</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.396"></a>
-<span class="sourceLineNo">397</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.397"></a>
-<span class="sourceLineNo">398</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.398"></a>
-<span class="sourceLineNo">399</span>     *  caching efficiency of encoded data blocks)<a name="line.399"></a>
-<span class="sourceLineNo">400</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.400"></a>
-<span class="sourceLineNo">401</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.401"></a>
-<span class="sourceLineNo">402</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.402"></a>
-<span class="sourceLineNo">403</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.403"></a>
-<span class="sourceLineNo">404</span>     * @return Block wrapped in a ByteBuffer.<a name="line.404"></a>
-<span class="sourceLineNo">405</span>     * @throws IOException<a name="line.405"></a>
-<span class="sourceLineNo">406</span>     */<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.408"></a>
-<span class="sourceLineNo">409</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.409"></a>
-<span class="sourceLineNo">410</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.410"></a>
-<span class="sourceLineNo">411</span>        throws IOException;<a name="line.411"></a>
-<span class="sourceLineNo">412</span><a name="line.412"></a>
-<span class="sourceLineNo">413</span>    /**<a name="line.413"></a>
-<span class="sourceLineNo">414</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.414"></a>
-<span class="sourceLineNo">415</span>     * @param block Block to be returned.<a name="line.415"></a>
-<span class="sourceLineNo">416</span>     */<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    void returnBlock(HFileBlock block);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.420"></a>
-<span class="sourceLineNo">421</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.421"></a>
-<span class="sourceLineNo">422</span>    /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.423"></a>
-<span class="sourceLineNo">424</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.424"></a>
-<span class="sourceLineNo">425</span>     * write.<a name="line.425"></a>
-<span class="sourceLineNo">426</span>     */<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    String getName();<a name="line.427"></a>
+<span class="sourceLineNo">359</span>    return HFile.getWriterFactory(conf, CacheConfig.DISABLED);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  }<a name="line.360"></a>
+<span class="sourceLineNo">361</span><a name="line.361"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * Returns the factory to be used to create {@link HFile} writers<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   */<a name="line.364"></a>
+<span class="sourceLineNo">365</span>  public static final WriterFactory getWriterFactory(Configuration conf,<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      CacheConfig cacheConf) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    int version = getFormatVersion(conf);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    switch (version) {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    case 2:<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      throw new IllegalArgumentException("This should never happen. " +<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        "Did you change hfile.format.version to read v2? This version of the software writes v3" +<a name="line.371"></a>
+<span class="sourceLineNo">372</span>        " hfiles only (but it can read v2 files without having to update hfile.format.version " +<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        "in hbase-site.xml)");<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    case 3:<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      return new HFile.WriterFactory(conf, cacheConf);<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    default:<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      throw new IllegalArgumentException("Cannot create writer for HFile " +<a name="line.377"></a>
+<span class="sourceLineNo">378</span>          "format version " + version);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>  /**<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   * An abstraction used by the block index.<a name="line.383"></a>
+<span class="sourceLineNo">384</span>   * Implementations will check cache for any asked-for block and return cached block if found.<a name="line.384"></a>
+<span class="sourceLineNo">385</span>   * Otherwise, after reading from fs, will try and put block into cache before returning.<a name="line.385"></a>
+<span class="sourceLineNo">386</span>   */<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  public interface CachingBlockReader {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    /**<a name="line.388"></a>
+<span class="sourceLineNo">389</span>     * Read in a file block.<a name="line.389"></a>
+<span class="sourceLineNo">390</span>     * @param offset offset to read.<a name="line.390"></a>
+<span class="sourceLineNo">391</span>     * @param onDiskBlockSize size of the block<a name="line.391"></a>
+<span class="sourceLineNo">392</span>     * @param cacheBlock<a name="line.392"></a>
+<span class="sourceLineNo">393</span>     * @param pread<a name="line.393"></a>
+<span class="sourceLineNo">394</span>     * @param isCompaction is this block being read as part of a compaction<a name="line.394"></a>
+<span class="sourceLineNo">395</span>     * @param expectedBlockType the block type we are expecting to read with this read operation,<a name="line.395"></a>
+<span class="sourceLineNo">396</span>     *  or null to read whatever block type is available and avoid checking (that might reduce<a name="line.396"></a>
+<span class="sourceLineNo">397</span>     *  caching efficiency of encoded data blocks)<a name="line.397"></a>
+<span class="sourceLineNo">398</span>     * @param expectedDataBlockEncoding the data block encoding the caller is expecting data blocks<a name="line.398"></a>
+<span class="sourceLineNo">399</span>     *  to be in, or null to not perform this check and return the block irrespective of the<a name="line.399"></a>
+<span class="sourceLineNo">400</span>     *  encoding. This check only applies to data blocks and can be set to null when the caller is<a name="line.400"></a>
+<span class="sourceLineNo">401</span>     *  expecting to read a non-data block and has set expectedBlockType accordingly.<a name="line.401"></a>
+<span class="sourceLineNo">402</span>     * @return Block wrapped in a ByteBuffer.<a name="line.402"></a>
+<span class="sourceLineNo">403</span>     * @throws IOException<a name="line.403"></a>
+<span class="sourceLineNo">404</span>     */<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    HFileBlock readBlock(long offset, long onDiskBlockSize,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>        boolean cacheBlock, final boolean pread, final boolean isCompaction,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>        final boolean updateCacheMetrics, BlockType expectedBlockType,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>        DataBlockEncoding expectedDataBlockEncoding)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>        throws IOException;<a name="line.409"></a>
+<span class="sourceLineNo">410</span><a name="line.410"></a>
+<span class="sourceLineNo">411</span>    /**<a name="line.411"></a>
+<span class="sourceLineNo">412</span>     * Return the given block back to the cache, if it was obtained from cache.<a name="line.412"></a>
+<span class="sourceLineNo">413</span>     * @param block Block to be returned.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>     */<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    void returnBlock(HFileBlock block);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  }<a name="line.416"></a>
+<span class="sourceLineNo">417</span><a name="line.417"></a>
+<span class="sourceLineNo">418</span>  /** An interface used by clients to open and iterate an {@link HFile}. */<a name="line.418"></a>
+<span class="sourceLineNo">419</span>  public interface Reader extends Closeable, CachingBlockReader {<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>     * Returns this reader's "name". Usually the last component of the path.<a name="line.421"></a>
+<span class="sourceLineNo">422</span>     * Needs to be constant as the file is being moved to support caching on<a name="line.422"></a>
+<span class="sourceLineNo">423</span>     * write.<a name="line.423"></a>
+<span class="sourceLineNo">424</span>     */<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    String getName();<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>    CellComparator getComparator();<a name="line.427"></a>
 <span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>    CellComparator getComparator();<a name="line.429"></a>
+<span class="sourceLineNo">429</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.429"></a>
 <span class="sourceLineNo">430</span><a name="line.430"></a>
-<span class="sourceLineNo">431</span>    HFileScanner getScanner(boolean cacheBlocks, final boolean pread, final boolean isCompaction);<a name="line.431"></a>
+<span class="sourceLineNo">431</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.431"></a>
 <span class="sourceLineNo">432</span><a name="line.432"></a>
-<span class="sourceLineNo">433</span>    HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock) throws IOException;<a name="line.433"></a>
+<span class="sourceLineNo">433</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.433"></a>
 <span class="sourceLineNo">434</span><a name="line.434"></a>
-<span class="sourceLineNo">435</span>    Map&lt;byte[], byte[]&gt; loadFileInfo() throws IOException;<a name="line.435"></a>
+<span class="sourceLineNo">435</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; getLastKey();<a name="line.437"></a>
+<span class="sourceLineNo">437</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.437"></a>
 <span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>    Optional&lt;Cell&gt; midKey() throws IOException;<a name="line.439"></a>
+<span class="sourceLineNo">439</span>    long length();<a name="line.439"></a>
 <span class="sourceLineNo">440</span><a name="line.440"></a>
-<span class="sourceLineNo">441</span>    long length();<a name="line.441"></a>
+<span class="sourceLineNo">441</span>    long getEntries();<a name="line.441"></a>
 <span class="sourceLineNo">442</span><a name="line.442"></a>
-<span class="sourceLineNo">443</span>    long getEntries();<a name="line.443"></a>
+<span class="sourceLineNo">443</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.443"></a>
 <span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>    Optional&lt;Cell&gt; getFirstKey();<a name="line.445"></a>
+<span class="sourceLineNo">445</span>    long indexSize();<a name="line.445"></a>
 <span class="sourceLineNo">446</span><a name="line.446"></a>
-<span class="sourceLineNo">447</span>    long indexSize();<a name="line.447"></a>
+<span class="sourceLineNo">447</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.447"></a>
 <span class="sourceLineNo">448</span><a name="line.448"></a>
-<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getFirstRowKey();<a name="line.449"></a>
+<span class="sourceLineNo">449</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.449"></a>
 <span class="sourceLineNo">450</span><a name="line.450"></a>
-<span class="sourceLineNo">451</span>    Optional&lt;byte[]&gt; getLastRowKey();<a name="line.451"></a>
+<span class="sourceLineNo">451</span>    FixedFileTrailer getTrailer();<a name="line.451"></a>
 <span class="sourceLineNo">452</span><a name="line.452"></a>
-<span class="sourceLineNo">453</span>    FixedFileTrailer getTrailer();<a name="line.453"></a>
+<span class="sourceLineNo">453</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.453"></a>
 <span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>    HFileBlockIndex.BlockIndexReader getDataBlockIndexReader();<a name="line.455"></a>
+<span class="sourceLineNo">455</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.455"></a>
 <span class="sourceLineNo">456</span><a name="line.456"></a>
-<span class="sourceLineNo">457</span>    HFileScanner getScanner(boolean cacheBlocks, boolean pread);<a name="line.457"></a>
+<span class="sourceLineNo">457</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.457"></a>
 <span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>    Compression.Algorithm getCompressionAlgorithm();<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    /**<a name="line.461"></a>
-<span class="sourceLineNo">462</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.462"></a>
-<span class="sourceLineNo">463</span>     * {@link HFile} version.<a name="line.463"></a>
-<span class="sourceLineNo">464</span>     * Knows nothing about how that metadata is structured.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>     */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.466"></a>
-<span class="sourceLineNo">467</span><a name="line.467"></a>
-<span class="sourceLineNo">468</span>    /**<a name="line.468"></a>
-<span class="sourceLineNo">469</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.469"></a>
-<span class="sourceLineNo">470</span>     * {@link HFile}  version.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>     * Knows nothing about how that metadata is structured.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>     */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.473"></a>
+<span class="sourceLineNo">459</span>    /**<a name="line.459"></a>
+<span class="sourceLineNo">460</span>     * Retrieves general Bloom filter metadata as appropriate for each<a name="line.460"></a>
+<span class="sourceLineNo">461</span>     * {@link HFile} version.<a name="line.461"></a>
+<span class="sourceLineNo">462</span>     * Knows nothing about how that metadata is structured.<a name="line.462"></a>
+<span class="sourceLineNo">463</span>     */<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    DataInput getGeneralBloomFilterMetadata() throws IOException;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>    /**<a name="line.466"></a>
+<span class="sourceLineNo">467</span>     * Retrieves delete family Bloom filter metadata as appropriate for each<a name="line.467"></a>
+<span class="sourceLineNo">468</span>     * {@link HFile}  version.<a name="line.468"></a>
+<span class="sourceLineNo">469</span>     * Knows nothing about how that metadata is structured.<a name="line.469"></a>
+<span class="sourceLineNo">470</span>     */<a name="line.470"></a>
+<span class="sourceLineNo">471</span>    DataInput getDeleteBloomFilterMetadata() throws IOException;<a name="line.471"></a>
+<span class="sourceLineNo">472</span><a name="line.472"></a>
+<span class="sourceLineNo">473</span>    Path getPath();<a name="line.473"></a>
 <span class="sourceLineNo">474</span><a name="line.474"></a>
-<span class="sourceLineNo">475</span>    Path getPath();<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>    /** Close method with optional evictOnClose */<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    void close(boolean evictOnClose) throws IOException;<a name="line.478"></a>
+<span class="sourceLineNo">475</span>    /** Close method with optional evictOnClose */<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    void close(boolean evictOnClose) throws IOException;<a name="line.476"></a>
+<span class="sourceLineNo">477</span><a name="line.477"></a>
+<span class="sourceLineNo">478</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.478"></a>
 <span class="sourceLineNo">479</span><a name="line.479"></a>
-<span class="sourceLineNo">480</span>    DataBlockEncoding getDataBlockEncoding();<a name="line.480"></a>
+<span class="sourceLineNo">480</span>    boolean hasMVCCInfo();<a name="line.480"></a>
 <span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    boolean hasMVCCInfo();<a name="line.482"></a>
-<span class="sourceLineNo">483</span><a name="line.483"></a>
-<span class="sourceLineNo">484</span>    /**<a name="line.484"></a>
-<span class="sourceLineNo">485</span>     * Return the file context of the HFile this reader belongs to<a name="line.485"></a>
-<span class="sourceLineNo">486</span>     */<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    HFileContext getFileContext();<a name="line.487"></a>
+<span class="sourceLineNo">482</span>    /**<a name="line.482"></a>
+<span class="sourceLineNo">483</span>     * Return the file context of the HFile this reader belongs to<a name="line.483"></a>
+<span class="sourceLineNo">484</span>     */<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    HFileContext getFileContext();<a name="line.485"></a>
+<span class="sourceLineNo">486</span><a name="line.486"></a>
+<span class="sourceLineNo">487</span>    boolean isPrimaryReplicaReader();<a name="line.487"></a>
 <span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>    boolean isPrimaryReplicaReader();<a name="line.489"></a>
+<span class="sourceLineNo">489</span>    boolean shouldIncludeMemStoreTS();<a name="line.489"></a>
 <span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>    boolean shouldIncludeMemStoreTS();<a name="line.491"></a>
+<span class="sourceLineNo">491</span>    boolean isDecodeMemStoreTS();<a name="line.491"></a>
 <span class="sourceLineNo">492</span><a name="line.492"></a>
-<span class="sourceLineNo">493</span>    boolean isDecodeMemStoreTS();<a name="line.493"></a>
+<span class="sourceLineNo">493</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.493"></a>
 <span class="sourceLineNo">494</span><a name="line.494"></a>
-<span class="sourceLineNo">495</span>    DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>    @VisibleForTesting<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>    @VisibleForTesting<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    boolean prefetchComplete();<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>    /**<a name="line.503"></a>
-<span class="sourceLineNo">504</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.504"></a>
-<span class="sourceLineNo">505</span>     * implementation should take care of thread safety.<a name="line.505"></a>
-<span class="sourceLineNo">506</span>     */<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    void unbufferStream();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>  }<a name="line.508"></a>
-<span class="sourceLineNo">509</span><a name="line.509"></a>
-<span class="sourceLineNo">510</span>  /**<a name="line.510"></a>
-<span class="sourceLineNo">511</span>   * Method returns the reader given the specified arguments.<a name="line.511"></a>
-<span class="sourceLineNo">512</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.512"></a>
-<span class="sourceLineNo">513</span>   *<a name="line.513"></a>
-<span class="sourceLineNo">514</span>   * @param path hfile's path<a name="line.514"></a>
-<span class="sourceLineNo">515</span>   * @param fsdis stream of path's file<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param size max size of the trailer.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param hfs<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @return an appropriate instance of HFileReader<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      justification="Intentional")<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.526"></a>
-<span class="sourceLineNo">527</span>      throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    FixedFileTrailer trailer = null;<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    try {<a name="line.529"></a>
-<span class="sourceLineNo">530</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      switch (trailer.getMajorVersion()) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        case 2:<a name="line.534"></a>
-<span class="sourceLineNo">535</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.535"></a>
-<span class="sourceLineNo">536</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        case 3:<a name="line.537"></a>
-<span class="sourceLineNo">538</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.538"></a>
-<span class="sourceLineNo">539</span>              primaryReplicaReader, conf);<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        default:<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    } catch (Throwable t) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      IOUtils.closeQuietly(fsdis);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    } finally {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      fsdis.unbuffer();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>  }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>  /**<a name="line.551"></a>
-<span class="sourceLineNo">552</span>   * The sockets and the file descriptors held by the method parameter<a name="line.552"></a>
-<span class="sourceLineNo">553</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.553"></a>
-<span class="sourceLineNo">554</span>   * that no other threads have access to the same passed reference.<a name="line.554"></a>
-<span class="sourceLineNo">555</span>   * @param fs A file system<a name="line.555"></a>
-<span class="sourceLineNo">556</span>   * @param path Path to HFile<a name="line.556"></a>
-<span class="sourceLineNo">557</span>   * @param fsdis a stream of path's file<a name="line.557"></a>
-<span class="sourceLineNo">558</span>   * @param size max size of the trailer.<a name="line.558"></a>
-<span class="sourceLineNo">559</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.559"></a>
-<span class="sourceLineNo">560</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.560"></a>
-<span class="sourceLineNo">561</span>   * @param conf Configuration<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @return A version specific Hfile Reader<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      throws IOException {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    HFileSystem hfs = null;<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    // the filesystem.<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    if (!(fs instanceof HFileSystem)) {<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      hfs = new HFileSystem(fs);<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    } else {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      hfs = (HFileSystem) fs;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    }<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.579"></a>
-<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
-<span class="sourceLineNo">581</span><a name="line.581"></a>
-<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
-<span class="sourceLineNo">583</span>  * Creates reader with cache configuration disabled<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  * @param fs filesystem<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  * @param path Path to file to read<a name="line.585"></a>
-<span class="sourceLineNo">586</span>  * @return an active Reader instance<a name="line.586"></a>
-<span class="sourceLineNo">587</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  */<a name="line.589"></a>
-<span class="sourceLineNo">590</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      throws IOException {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.592"></a>
-<span class="sourceLineNo">593</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.593"></a>
-<span class="sourceLineNo">594</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>  }<a name="line.595"></a>
-<span class="sourceLineNo">596</span><a name="line.596"></a>
-<span class="sourceLineNo">597</span>  /**<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   * @param fs filesystem<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   * @param path Path to file to read<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * @param cacheConf This must not be null. @see<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   * @return an active Reader instance<a name="line.603"></a>
-<span class="sourceLineNo">604</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.604"></a>
-<span class="sourceLineNo">605</span>   *           is corrupt/invalid.<a name="line.605"></a>
-<span class="sourceLineNo">606</span>   */<a name="line.606"></a>
-<span class="sourceLineNo">607</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.609"></a>
-<span class="sourceLineNo">610</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.612"></a>
-<span class="sourceLineNo">613</span>  }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>  /**<a name="line.615"></a>
-<span class="sourceLineNo">616</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>   * The sockets and the file descriptors held by the method parameter<a name="line.617"></a>
-<span class="sourceLineNo">618</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.618"></a>
-<span class="sourceLineNo">619</span>   * that no other threads have access to the same passed reference.<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
-<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
-<span class="sourceLineNo">622</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.625"></a>
-<span class="sourceLineNo">626</span>  }<a name="line.626"></a>
-<span class="sourceLineNo">627</span><a name="line.627"></a>
-<span class="sourceLineNo">628</span>  /**<a name="line.628"></a>
-<span class="sourceLineNo">629</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.629"></a>
-<span class="sourceLineNo">630</span>   * @param fs filesystem<a name="line.630"></a>
-<span class="sourceLineNo">631</span>   * @param path Path to file to verify<a name="line.631"></a>
-<span class="sourceLineNo">632</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.632"></a>
-<span class="sourceLineNo">633</span>   * @throws IOException if failed to read from the underlying stream<a name="line.633"></a>
-<span class="sourceLineNo">634</span>   */<a name="line.634"></a>
-<span class="sourceLineNo">635</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>  }<a name="line.637"></a>
-<span class="sourceLineNo">638</span><a name="line.638"></a>
-<span class="sourceLineNo">639</span>  /**<a name="line.639"></a>
-<span class="sourceLineNo">640</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>   * @param fs filesystem<a name="line.641"></a>
-<span class="sourceLineNo">642</span>   * @param fileStatus the file to verify<a name="line.642"></a>
-<span class="sourceLineNo">643</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.643"></a>
-<span class="sourceLineNo">644</span>   * @throws IOException if failed to read from the underlying stream<a name="line.644"></a>
-<span class="sourceLineNo">645</span>   */<a name="line.645"></a>
-<span class="sourceLineNo">646</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      throws IOException {<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    final Path path = fileStatus.getPath();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    final long size = fileStatus.getLen();<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try (FSDataInputStreamWrapper fsdis = new FSDataInputStreamWrapper(fs, path)) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      return true;<a name="line.654"></a>
-<span class="sourceLineNo">655</span>    } catch (IllegalArgumentException e) {<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      return false;<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  }<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>  /**<a name="line.660"></a>
-<span class="sourceLineNo">661</span>   * Metadata for this file. Conjured by the writer. Read in by the reader.<a name="line.661"></a>
-<span class="sourceLineNo">662</span>   */<a name="line.662"></a>
-<span class="sourceLineNo">663</span>  public static class FileInfo implements SortedMap&lt;byte[], byte[]&gt; {<a name="line.663"></a>
-<span class="sourceLineNo">664</span>    static final String RESERVED_PREFIX = "hfile.";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");<a name="line.667"></a>
-<span class="sourceLineNo">668</span>    static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    static final byte [] CREATE_TIME_TS = Bytes.toBytes(RESERVED_PREFIX + "CREATE_TIME_TS");<a name="line.669"></a>
-<span class="sourceLineNo">670</span>    static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    static final byte [] TAGS_COMPRESSED = Bytes.toBytes(RESERVED_PREFIX + "TAGS_COMPRESSED");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    private final SortedMap&lt;byte [], byte []&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>    public FileInfo() {<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      super();<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    }<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>    /**<a name="line.679"></a>
-<span class="sourceLineNo">680</span>     * Append the given key/value pair to the file info, optionally checking the<a name="line.680"></a>
-<span class="sourceLineNo">681</span>     * key prefix.<a name="line.681"></a>
-<span class="sourceLineNo">682</span>     *<a name="line.682"></a>
-<span class="sourceLineNo">683</span>     * @param k key to add<a name="line.683"></a>
-<span class="sourceLineNo">684</span>     * @param v value to add<a name="line.684"></a>
-<span class="sourceLineNo">685</span>     * @param checkPrefix whether to check that the provided key does not start<a name="line.685"></a>
-<span class="sourceLineNo">686</span>     *          with the reserved prefix<a name="line.686"></a>
-<span class="sourceLineNo">687</span>     * @return this file info object<a name="line.687"></a>
-<span class="sourceLineNo">688</span>     * @throws IOException if the key or value is invalid<a name="line.688"></a>
-<span class="sourceLineNo">689</span>     */<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    public FileInfo append(final byte[] k, final byte[] v,<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        final boolean checkPrefix) throws IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      if (k == null || v == null) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>        throw new NullPointerException("Key nor value may be null");<a name="line.693"></a>
-<span class="sourceLineNo">694</span>      }<a name="line.694"></a>
-<span class="sourceLineNo">695</span>      if (checkPrefix &amp;&amp; isReservedFileInfoKey(k)) {<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        throw new IOException("Keys with a " + FileInfo.RESERVED_PREFIX<a name="line.696"></a>
-<span class="sourceLineNo">697</span>            + " are reserved");<a name="line.697"></a>
-<span class="sourceLineNo">698</span>      }<a name="line.698"></a>
-<span class="sourceLineNo">699</span>      put(k, v);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>      return this;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    }<a name="line.701"></a>
-<span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>    @Override<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    public void clear() {<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      this.map.clear();<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    }<a name="line.706"></a>
-<span class="sourceLineNo">707</span><a name="line.707"></a>
-<span class="sourceLineNo">708</span>    @Override<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    public Comparator&lt;? super byte[]&gt; comparator() {<a name="line.709"></a>
-<span class="sourceLineNo">710</span>      return map.comparator();<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    }<a name="line.711"></a>
-<span class="sourceLineNo">712</span><a name="line.712"></a>
-<span class="sourceLineNo">713</span>    @Override<a name="line.713"></a>
-<span class="sourceLineNo">714</span>    public boolean containsKey(Object key) {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>      return map.containsKey(key);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    }<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>    @Override<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    public boolean containsValue(Object value) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>      return map.containsValue(value);<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>    @Override<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    public Set&lt;java.util.Map.Entry&lt;byte[], byte[]&gt;&gt; entrySet() {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      return map.entrySet();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>    }<a name="line.726"></a>
-<span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span>    @Override<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    public boolean equals(Object o) {<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      return map.equals(o);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    }<a name="line.731"></a>
-<span class="sourceLineNo">732</span><a name="line.732"></a>
-<span class="sourceLineNo">733</span>    @Override<a name="line.733"></a>
-<span class="sourceLineNo">734</span>    public byte[] firstKey() {<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      return map.firstKey();<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    }<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    @Override<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    public byte[] get(Object key) {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      return map.get(key);<a name="line.740"></a>
-<span class="sourceLineNo">741</span>    }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>    @Override<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    public int hashCode() {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      return map.hashCode();<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    }<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>    @Override<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    public SortedMap&lt;byte[], byte[]&gt; headMap(byte[] toKey) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>      return this.map.headMap(toKey);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    }<a name="line.751"></a>
-<span class="sourceLineNo">752</span><a name="line.752"></a>
-<span class="sourceLineNo">753</span>    @Override<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    public boolean isEmpty() {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      return map.isEmpty();<a name="line.755"></a>
-<span class="sourceLineNo">756</span>    }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>    @Override<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    public Set&lt;byte[]&gt; keySet() {<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      return map.keySet();<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    }<a name="line.761"></a>
-<span class="sourceLineNo">762</span><a name="line.762"></a>
-<span class="sourceLineNo">763</span>    @Override<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    public byte[] lastKey() {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>      return map.lastKey();<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    }<a name="line.766"></a>
-<span class="sourceLineNo">767</span><a name="line.767"></a>
-<span class="sourceLineNo">768</span>    @Override<a name="line.768"></a>
-<span class="sourceLineNo">769</span>    public byte[] put(byte[] key, byte[] value) {<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      return this.map.put(key, value);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>    }<a name="line.771"></a>
-<span class="sourceLineNo">772</span><a name="line.772"></a>
-<span class="sourceLineNo">773</span>    @Override<a name="line.773"></a>
-<span class="sourceLineNo">774</span>    public void putAll(Map&lt;? extends byte[], ? extends byte[]&gt; m) {<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      this.map.putAll(m);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>    }<a name="line.776"></a>
-<span class="sourceLineNo">777</span><a name="line.777"></a>
-<span class="sourceLineNo">778</span>    @Override<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    public byte[] remove(Object key) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>      return this.map.remove(key);<a name="line.780"></a>
-<span class="sourceLineNo">781</span>    }<a name="line.781"></a>
-<span class="sourceLineNo">782</span><a name="line.782"></a>
-<span class="sourceLineNo">783</span>    @Override<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    public int size() {<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      return map.size();<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span><a name="line.787"></a>
-<span class="sourceLineNo">788</span>    @Override<a name="line.788"></a>
-<span class="sourceLineNo">789</span>    public SortedMap&lt;byte[], byte[]&gt; subMap(byte[] fromKey, byte[] toKey) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      return this.map.subMap(fromKey, toKey);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    }<a name="line.791"></a>
-<span class="sourceLineNo">792</span><a name="line.792"></a>
-<span class="sourceLineNo">793</span>    @Override<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    public SortedMap&lt;byte[], byte[]&gt; tailMap(byte[] fromKey) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>      return this.map.tailMap(fromKey);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    }<a name="line.796"></a>
-<span class="sourceLineNo">797</span><a name="line.797"></a>
-<span class="sourceLineNo">798</span>    @Override<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    public Collection&lt;byte[]&gt; values() {<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      return map.values();<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    }<a name="line.801"></a>
-<span class="sourceLineNo">802</span><a name="line.802"></a>
-<span class="sourceLineNo">803</span>    /**<a name="line.803"></a>
-<span class="sourceLineNo">804</span>     * Write out this instance on the passed in &lt;code&gt;out&lt;/code&gt; stream.<a name="line.804"></a>
-<span class="sourceLineNo">805</span>     * We write it as a protobuf.<a name="line.805"></a>
-<span class="sourceLineNo">806</span>     * @param out<a name="line.806"></a>
-<span class="sourceLineNo">807</span>     * @throws IOException<a name="line.807"></a>
-<span class="sourceLineNo">808</span>     * @see #read(DataInputStream)<a name="line.808"></a>
-<span class="sourceLineNo">809</span>     */<a name="line.809"></a>
-<span class="sourceLineNo">810</span>    void write(final DataOutputStream out) throws IOException {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>      HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();<a name="line.811"></a>
-<span class="sourceLineNo">812</span>      for (Map.Entry&lt;byte [], byte[]&gt; e: this.map.entrySet()) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>        HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey()));<a name="line.814"></a>
-<span class="sourceLineNo">815</span>        bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue()));<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        builder.addMapEntry(bbpBuilder.build());<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      }<a name="line.817"></a>
-<span class="sourceLineNo">818</span>      out.write(ProtobufMagic.PB_MAGIC);<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      builder.build().writeDelimitedTo(out);<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    }<a name="line.820"></a>
-<span class="sourceLineNo">821</span><a name="line.821"></a>
-<span class="sourceLineNo">822</span>    /**<a name="line.822"></a>
-<span class="sourceLineNo">823</span>     * Populate this instance with what we find on the passed in &lt;code&gt;in&lt;/code&gt; stream.<a name="line.823"></a>
-<span class="sourceLineNo">824</span>     * Can deserialize protobuf of old Writables format.<a name="line.824"></a>
-<span class="sourceLineNo">825</span>     * @param in<a name="line.825"></a>
-<span class="sourceLineNo">826</span>     * @throws IOException<a name="line.826"></a>
-<span class="sourceLineNo">827</span>     * @see #write(DataOutputStream)<a name="line.827"></a>
-<span class="sourceLineNo">828</span>     */<a name="line.828"></a>
-<span class="sourceLineNo">829</span>    void read(final DataInputStream in) throws IOException {<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      // This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.<a name="line.830"></a>
-<span class="sourceLineNo">831</span>      int pblen = ProtobufUtil.lengthOfPBMagic();<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      byte [] pbuf = new byte[pblen];<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      if (in.markSupported()) in.mark(pblen);<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      int read = in.read(pbuf);<a name="line.834"></a>
-<span class="sourceLineNo">835</span>      if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);<a name="line.835"></a>
-<span class="sourceLineNo">836</span>      if (ProtobufUtil.isPBMagicPrefix(pbuf)) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>        parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));<a name="line.837"></a>
-<span class="sourceLineNo">838</span>      } else {<a name="line.838"></a>
-<span class="sourceLineNo">839</span>        if (in.markSupported()) {<a name="line.839"></a>
-<span class="sourceLineNo">840</span>          in.reset();<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          parseWritable(in);<a name="line.841"></a>
-<span class="sourceLineNo">842</span>        } else {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          // We cannot use BufferedInputStream, it consumes more than we read from the underlying IS<a name="line.843"></a>
-<span class="sourceLineNo">844</span>          ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);<a name="line.844"></a>
-<span class="sourceLineNo">845</span>          SequenceInputStream sis = new SequenceInputStream(bais, in); // Concatenate input streams<a name="line.845"></a>
-<span class="sourceLineNo">846</span>          // TODO: Am I leaking anything here wrapping the passed in stream?  We are not calling close on the wrapped<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          // streams but they should be let go after we leave this context?  I see that we keep a reference to the<a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // passed in inputstream but since we no longer have a reference to this after we leave, we should be ok.<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          parseWritable(new DataInputStream(sis));<a name="line.849"></a>
-<span class="sourceLineNo">850</span>        }<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      }<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    }<a name="line.852"></a>
-<span class="sourceLineNo">853</span><a name="line.853"></a>
-<span class="sourceLineNo">854</span>    /** Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of<a name="line.854"></a>
-<span class="sourceLineNo">855</span>     * a byte [].  The old map format had a byte before each entry that held a code which was short for the key or<a name="line.855"></a>
-<span class="sourceLineNo">856</span>     * value type.  We know it was a byte [] so in below we just read and dump it.<a name="line.856"></a>
-<span class="sourceLineNo">857</span>     * @throws IOException<a name="line.857"></a>
-<span class="sourceLineNo">858</span>     */<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    void parseWritable(final DataInputStream in) throws IOException {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>      // First clear the map.  Otherwise we will just accumulate entries every time this method is called.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      this.map.clear();<a name="line.861"></a>
-<span class="sourceLineNo">862</span>      // Read the number of entries in the map<a name="line.862"></a>
-<span class="sourceLineNo">863</span>      int entries = in.readInt();<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      // Then read each key/value pair<a name="line.864"></a>
-<span class="sourceLineNo">865</span>      for (int i = 0; i &lt; entries; i++) {<a name="line.865"></a>
-<span class="sourceLineNo">866</span>        byte [] key = Bytes.readByteArray(in);<a name="line.866"></a>
-<span class="sourceLineNo">867</span>        // We used to read a byte that encoded the class type.  Read and ignore it because it is always byte [] in hfile<a name="line.867"></a>
-<span class="sourceLineNo">868</span>        in.readByte();<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        byte [] value = Bytes.readByteArray(in);<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        this.map.put(key, value);<a name="line.870"></a>
-<span class="sourceLineNo">871</span>      }<a name="line.871"></a>
-<span class="sourceLineNo">872</span>    }<a name="line.872"></a>
-<span class="sourceLineNo">873</span><a name="line.873"></a>
-<span class="sourceLineNo">874</span>    /**<a name="line.874"></a>
-<span class="sourceLineNo">875</span>     * Fill our map with content of the pb we read off disk<a name="line.875"></a>
-<span class="sourceLineNo">876</span>     * @param fip protobuf message to read<a name="line.876"></a>
-<span class="sourceLineNo">877</span>     */<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    void parsePB(final HFileProtos.FileInfoProto fip) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      this.map.clear();<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      for (BytesBytesPair pair: fip.getMapEntryList()) {<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>  }<a name="line.884"></a>
-<span class="sourceLineNo">885</span><a name="line.885"></a>
-<span class="sourceLineNo">886</span>  /** Return true if the given file info key is reserved for internal use. */<a name="line.886"></a>
-<span class="sourceLineNo">887</span>  public static boolean isReservedFileInfoKey(byte[] key) {<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    return Bytes.startsWith(key, FileInfo.RESERVED_PREFIX_BYTES);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  /**<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * Get names of supported compression algorithms. The names are acceptable by<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * HFile.Writer.<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   *<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   * @return Array of strings, each represents a supported compression<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   *         algorithm. Currently, the following compression algorithms are<a name="line.896"></a>
-<span class="sourceLineNo">897</span>   *         supported.<a name="line.897"></a>
-<span class="sourceLineNo">898</span>   *         &lt;ul&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>   *         &lt;li&gt;"none" - No compression.<a name="line.899"></a>
-<span class="sourceLineNo">900</span>   *         &lt;li&gt;"gz" - GZIP compression.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>   *         &lt;/ul&gt;<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   */<a name="line.902"></a>
-<span class="sourceLineNo">903</span>  public static String[] getSupportedCompressionAlgorithms() {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>    return Compression.getSupportedAlgorithms();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>  }<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>  // Utility methods.<a name="line.907"></a>
-<span class="sourceLineNo">908</span>  /*<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   * @param l Long to convert to an int.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @return &lt;code&gt;l&lt;/code&gt; cast as an int.<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   */<a name="line.911"></a>
-<span class="sourceLineNo">912</span>  static int longToInt(final long l) {<a name="line.912"></a>
-<span class="sourceLineNo">913</span>    // Expecting the size() of a block not exceeding 4GB. Assuming the<a name="line.913"></a>
-<span class="sourceLineNo">914</span>    // size() will wrap to negative integer if it exceeds 2GB (From tfile).<a name="line.914"></a>
-<span class="sourceLineNo">915</span>    return (int)(l &amp; 0x00000000ffffffffL);<a name="line.915"></a>
-<span class="sourceLineNo">916</span>  }<a name="line.916"></a>
-<span class="sourceLineNo">917</span><a name="line.917"></a>
-<span class="sourceLineNo">918</span>  /**<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * Returns all HFiles belonging to the given region directory. Could return an<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   * empty list.<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   *<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   * @param fs  The file system reference.<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param regionDir  The region directory to scan.<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   * @return The list of files found.<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   * @throws IOException When scanning the files fails.<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   */<a name="line.926"></a>
-<span class="sourceLineNo">927</span>  static List&lt;Path&gt; getStoreFiles(FileSystem fs, Path regionDir)<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      throws IOException {<a name="line.928"></a>
-<span class="sourceLineNo">929</span>    List&lt;Path&gt; regionHFiles = new ArrayList&lt;&gt;();<a name="line.929"></a>
-<span class="sourceLineNo">930</span>    PathFilter dirFilter = new FSUtils.DirFilter(fs);<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    for(FileStatus dir : familyDirs) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      FileStatus[] files = fs.listStatus(dir.getPath());<a name="line.933"></a>
-<span class="sourceLineNo">934</span>      for (FileStatus file : files) {<a name="line.934"></a>
-<span class="sourceLineNo">935</span>        if (!file.isDirectory() &amp;&amp;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>            (!file.getPath().toString().contains(HConstants.HREGION_OLDLOGDIR_NAME)) &amp;&amp;<a name="line.936"></a>
-<span class="sourceLineNo">937</span>            (!file.getPath().toString().contains(HConstants.RECOVERED_EDITS_DIR))) {<a name="line.937"></a>
-<span class="sourceLineNo">938</span>          regionHFiles.add(file.getPath());<a name="line.938"></a>
-<span class="sourceLineNo">939</span>        }<a name="line.939"></a>
-<span class="sourceLineNo">940</span>      }<a name="line.940"></a>
-<span class="sourceLineNo">941</span>    }<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    return regionHFiles;<a name="line.942"></a>
-<span class="sourceLineNo">943</span>  }<a name="line.943"></a>
-<span class="sourceLineNo">944</span><a name="line.944"></a>
-<span class="sourceLineNo">945</span>  /**<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   * Checks the given {@link HFile} format version, and throws an exception if<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * invalid. Note that if the version number comes from an input file and has<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   * not been verified, the caller needs to re-throw an {@link IOException} to<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   * indicate that this is not a software error, but corrupted input.<a name="line.949"></a>
-<span class="sourceLineNo">950</span>   *<a name="line.950"></a>
-<span class="sourceLineNo">951</span>   * @param version an HFile version<a name="line.951"></a>
-<span class="sourceLineNo">952</span>   * @throws IllegalArgumentException if the version is invalid<a name="line.952"></a>
-<span class="sourceLineNo">953</span>   */<a name="line.953"></a>
-<span class="sourceLineNo">954</span>  public static void checkFormatVersion(int version)<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      throws IllegalArgumentException {<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    if (version &lt; MIN_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      throw new IllegalArgumentException("Invalid HFile version: " + version<a name="line.957"></a>
-<span class="sourceLineNo">958</span>          + " (expected to be " + "between " + MIN_FORMAT_VERSION + " and "<a name="line.958"></a>
-<span class="sourceLineNo">959</span>          + MAX_FORMAT_VERSION + ")");<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    }<a name="line.960"></a>
-<span class="sourceLineNo">961</span>  }<a name="line.961"></a>
-<span class="sourceLineNo">962</span><a name="line.962"></a>
-<span class="sourceLineNo">963</span><a name="line.963"></a>
-<span class="sourceLineNo">964</span>  public static void checkHFileVersion(final Configuration c) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>    int version = c.getInt(FORMAT_VERSION_KEY, MAX_FORMAT_VERSION);<a name="line.965"></a>
-<span class="sourceLineNo">966</span>    if (version &lt; MAX_FORMAT_VERSION || version &gt; MAX_FORMAT_VERSION) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>      throw new IllegalArgumentException("The setting for " + FORMAT_VERSION_KEY +<a name="line.967"></a>
-<span class="sourceLineNo">968</span>        " (in your hbase-*.xml files) is " + version + " which does not match " +<a name="line.968"></a>
-<span class="sourceLineNo">969</span>        MAX_FORMAT_VERSION +<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        "; are you running with a configuration from an older or newer hbase install (an " +<a name="line.970"></a>
-<span class="sourceLineNo">971</span>        "incompatible hbase-default.xml or hbase-site.xml on your CLASSPATH)?");<a name="line.971"></a>
-<span class="sourceLineNo">972</span>    }<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  }<a name="line.973"></a>
-<span class="sourceLineNo">974</span><a name="line.974"></a>
-<span class="sourceLineNo">975</span>  public static void main(String[] args) throws Exception {<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    // delegate to preserve old behavior<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    HFilePrettyPrinter.main(args);<a name="line.977"></a>
-<span class="sourceLineNo">978</span>  }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>}<a name="line.979"></a>
+<span class="sourceLineNo">495</span>    @VisibleForTesting<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    HFileBlock.FSReader getUncachedBlockReader();<a name="line.496"></a>
+<span class="sourceLineNo">497</span><a name="line.497"></a>
+<span class="sourceLineNo">498</span>    @VisibleForTesting<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    boolean prefetchComplete();<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>    /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>     * To close the stream's socket. Note: This can be concurrently called from multiple threads and<a name="line.502"></a>
+<span class="sourceLineNo">503</span>     * implementation should take care of thread safety.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>     */<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    void unbufferStream();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
+<span class="sourceLineNo">507</span><a name="line.507"></a>
+<span class="sourceLineNo">508</span>  /**<a name="line.508"></a>
+<span class="sourceLineNo">509</span>   * Method returns the reader given the specified arguments.<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * TODO This is a bad abstraction.  See HBASE-6635.<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   *<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param path hfile's path<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param fsdis stream of path's file<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param size max size of the trailer.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @param cacheConf Cache configuation values, cannot be null.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   * @param hfs<a name="line.516"></a>
+<span class="sourceLineNo">517</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.517"></a>
+<span class="sourceLineNo">518</span>   * @return an appropriate instance of HFileReader<a name="line.518"></a>
+<span class="sourceLineNo">519</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.519"></a>
+<span class="sourceLineNo">520</span>   */<a name="line.520"></a>
+<span class="sourceLineNo">521</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SF_SWITCH_FALLTHROUGH",<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      justification="Intentional")<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  private static Reader openReader(Path path, FSDataInputStreamWrapper fsdis, long size,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>      CacheConfig cacheConf, HFileSystem hfs, boolean primaryReplicaReader, Configuration conf)<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      throws IOException {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    FixedFileTrailer trailer = null;<a name="line.526"></a>
+<span class="sourceLineNo">527</span>    try {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>      boolean isHBaseChecksum = fsdis.shouldUseHBaseChecksum();<a name="line.528"></a>
+<span class="sourceLineNo">529</span>      assert !isHBaseChecksum; // Initially we must read with FS checksum.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      trailer = FixedFileTrailer.readFromStream(fsdis.getStream(isHBaseChecksum), size);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      switch (trailer.getMajorVersion()) {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        case 2:<a name="line.532"></a>
+<span class="sourceLineNo">533</span>          LOG.debug("Opening HFile v2 with v3 reader");<a name="line.533"></a>
+<span class="sourceLineNo">534</span>          // Fall through. FindBugs: SF_SWITCH_FALLTHROUGH<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        case 3:<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          return new HFileReaderImpl(path, trailer, fsdis, size, cacheConf, hfs,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              primaryReplicaReader, conf);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        default:<a name="line.538"></a>
+<span class="sourceLineNo">539</span>          throw new IllegalArgumentException("Invalid HFile version " + trailer.getMajorVersion());<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    } catch (Throwable t) {<a name="line.541"></a>
+<span class="sourceLineNo">542</span>      IOUtils.closeQuietly(fsdis);<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      throw new CorruptHFileException("Problem reading HFile Trailer from file " + path, t);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    } finally {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      fsdis.unbuffer();<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>  }<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>  /**<a name="line.549"></a>
+<span class="sourceLineNo">550</span>   * The sockets and the file descriptors held by the method parameter<a name="line.550"></a>
+<span class="sourceLineNo">551</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.551"></a>
+<span class="sourceLineNo">552</span>   * that no other threads have access to the same passed reference.<a name="line.552"></a>
+<span class="sourceLineNo">553</span>   * @param fs A file system<a name="line.553"></a>
+<span class="sourceLineNo">554</span>   * @param path Path to HFile<a name="line.554"></a>
+<span class="sourceLineNo">555</span>   * @param fsdis a stream of path's file<a name="line.555"></a>
+<span class="sourceLineNo">556</span>   * @param size max size of the trailer.<a name="line.556"></a>
+<span class="sourceLineNo">557</span>   * @param cacheConf Cache configuration for hfile's contents<a name="line.557"></a>
+<span class="sourceLineNo">558</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.558"></a>
+<span class="sourceLineNo">559</span>   * @param conf Configuration<a name="line.559"></a>
+<span class="sourceLineNo">560</span>   * @return A version specific Hfile Reader<a name="line.560"></a>
+<span class="sourceLineNo">561</span>   * @throws IOException If file is invalid, will throw CorruptHFileException flavored IOException<a name="line.561"></a>
+<span class="sourceLineNo">562</span>   */<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  public static Reader createReader(FileSystem fs, Path path, FSDataInputStreamWrapper fsdis,<a name="line.563"></a>
+<span class="sourceLineNo">564</span>      long size, CacheConfig cacheConf, boolean primaryReplicaReader, Configuration conf)<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      throws IOException {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    HFileSystem hfs = null;<a name="line.566"></a>
+<span class="sourceLineNo">567</span><a name="line.567"></a>
+<span class="sourceLineNo">568</span>    // If the fs is not an instance of HFileSystem, then create an<a name="line.568"></a>
+<span class="sourceLineNo">569</span>    // instance of HFileSystem that wraps over the specified fs.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>    // In this case, we will not be able to avoid checksumming inside<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    // the filesystem.<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    if (!(fs instanceof HFileSystem)) {<a name="line.572"></a>
+<span class="sourceLineNo">573</span>      hfs = new HFileSystem(fs);<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    } else {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      hfs = (HFileSystem) fs;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    return openReader(path, fsdis, size, cacheConf, hfs, primaryReplicaReader, conf);<a name="line.577"></a>
+<span class="sourceLineNo">578</span>  }<a name="line.578"></a>
+<span class="sourceLineNo">579</span><a name="line.579"></a>
+<span class="sourceLineNo">580</span>  /**<a name="line.580"></a>
+<span class="sourceLineNo">581</span>  * Creates reader with cache configuration disabled<a name="line.581"></a>
+<span class="sourceLineNo">582</span>  * @param fs filesystem<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  * @param path Path to file to read<a name="line.583"></a>
+<span class="sourceLineNo">584</span>  * @return an active Reader instance<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  * @throws IOException Will throw a CorruptHFileException<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  * (DoNotRetryIOException subtype) if hfile is corrupt/invalid.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>  */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  public static Reader createReader(FileSystem fs, Path path, Configuration conf)<a name="line.588"></a>
+<span class="sourceLineNo">589</span>      throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    // The primaryReplicaReader is mainly used for constructing block cache key, so if we do not use<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    // block cache then it is OK to set it as any value. We use true here.<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    return createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>  }<a name="line.593"></a>
+<span class="sourceLineNo">594</span><a name="line.594"></a>
+<span class="sourceLineNo">595</span>  /**<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   * @param fs filesystem<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   * @param path Path to file to read<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * @param cacheConf This must not be null. @see<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   *          {@link org.apache.hadoop.hbase.io.hfile.CacheConfig#CacheConfig(Configuration)}<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * @param primaryReplicaReader true if this is a reader for primary replica<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   * @return an active Reader instance<a name="line.601"></a>
+<span class="sourceLineNo">602</span>   * @throws IOException Will throw a CorruptHFileException (DoNotRetryIOException subtype) if hfile<a name="line.602"></a>
+<span class="sourceLineNo">603</span>   *           is corrupt/invalid.<a name="line.603"></a>
+<span class="sourceLineNo">604</span>   */<a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static Reader createReader(FileSystem fs, Path path, CacheConfig cacheConf,<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      boolean primaryReplicaReader, Configuration conf) throws IOException {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    Preconditions.checkNotNull(cacheConf, "Cannot create Reader with null CacheConf");<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return openReader(path, stream, fs.getFileStatus(path).getLen(), cacheConf,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      stream.getHfs(), primaryReplicaReader, conf);<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * This factory method is used only by unit tests. &lt;br/&gt;<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * The sockets and the file descriptors held by the method parameter<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * {@code FSDataInputStreamWrapper} passed will be freed after its usage so caller needs to ensure<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * that no other threads have access to the same passed reference.<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   */<a name="line.618"></a>
+<span class="sourceLineNo">619</span>  @VisibleForTesting<a name="line.619"></a>
+<span class="sourceLineNo">620</span>  static Reader createReaderFromStream(Path path, FSDataInputStream fsdis, long size,<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      CacheConfig cacheConf, Configuration conf) throws IOException {<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    FSDataInputStreamWrapper wrapper = new FSDataInputStreamWrapper(fsdis);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    return openReader(path, wrapper, size, cacheConf, null, true, conf);<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>  /**<a name="line.626"></a>
+<span class="sourceLineNo">627</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>   * @param fs filesystem<a name="line.628"></a>
+<span class="sourceLineNo">629</span>   * @param path Path to file to verify<a name="line.629"></a>
+<span class="sourceLineNo">630</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.630"></a>
+<span class="sourceLineNo">631</span>   * @throws IOException if failed to read from the underlying stream<a name="line.631"></a>
+<span class="sourceLineNo">632</span>   */<a name="line.632"></a>
+<span class="sourceLineNo">633</span>  public static boolean isHFileFormat(final FileSystem fs, final Path path) throws IOException {<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    return isHFileFormat(fs, fs.getFileStatus(path));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  /**<a name="line.637"></a>
+<span class="sourceLineNo">638</span>   * Returns true if the specified file has a valid HFile Trailer.<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * @param fs filesystem<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * @param fileStatus the file to verify<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   * @return true if the file has a valid HFile Trailer, otherwise false<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @throws IOException if failed to read from the underlying stream<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   */<a name="line.643"></a>
+<span class="sourceLineNo">644</span>  public static boolean isHFileFormat(final FileSystem fs, final FileStatus fileStatus)<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throws IOException {<a name="line.645"></a

<TRUNCATED>

[39/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index f8359fc..b0bb226 100644
--- a/book.html
+++ b/book.html
@@ -41371,7 +41371,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2018-12-11 14:33:38 UTC
+Last updated 2018-12-12 14:33:02 UTC
 </div>
 </div>
 </body>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index e4c5ca9..30665c9 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -316,7 +316,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/checkstyle-aggregate.html
----------------------------------------------------------------------
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index e8f82d1..c3aefbb 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Checkstyle Results</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -9766,12 +9766,12 @@
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation">JavadocTagContinuationIndentation</a>
 <ul>
 <li>offset: <tt>&quot;2&quot;</tt></li></ul></td>
-<td>728</td>
+<td>729</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td></td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription">NonEmptyAtclauseDescription</a></td>
-<td>3417</td>
+<td>3416</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td>misc</td>
@@ -18765,7 +18765,7 @@
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
+<td>JavadocTagContinuationIndentation</td>
 <td>Javadoc comment at column 0 has parse error. Details: no viable alternative at input '   *' while parsing JAVADOC_TAG</td>
 <td>117</td></tr>
 <tr class="b">
@@ -47135,181 +47135,181 @@
 <td>indentation</td>
 <td>Indentation</td>
 <td>'case' child has incorrect indentation level 4, expected level should be 6.</td>
-<td>371</td></tr>
+<td>369</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'block' child has incorrect indentation level 6, expected level should be 8.</td>
-<td>372</td></tr>
+<td>370</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'case' child has incorrect indentation level 4, expected level should be 6.</td>
-<td>376</td></tr>
+<td>374</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'block' child has incorrect indentation level 6, expected level should be 8.</td>
-<td>377</td></tr>
+<td>375</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'case' child has incorrect indentation level 4, expected level should be 6.</td>
-<td>378</td></tr>
+<td>376</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'block' child has incorrect indentation level 6, expected level should be 8.</td>
-<td>379</td></tr>
+<td>377</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>394</td></tr>
+<td>392</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>395</td></tr>
+<td>393</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>398</td></tr>
+<td>396</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>399</td></tr>
+<td>397</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>401</td></tr>
+<td>399</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>402</td></tr>
+<td>400</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>403</td></tr>
+<td>401</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>405</td></tr>
+<td>403</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>518</td></tr>
+<td>516</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>JavadocTagContinuationIndentation</td>
 <td>Line continuation have incorrect indentation level, expected level should be 2.</td>
-<td>588</td></tr>
+<td>586</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>806</td></tr>
+<td>804</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>807</td></tr>
+<td>805</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>825</td></tr>
+<td>823</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>826</td></tr>
+<td>824</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>833</td></tr>
+<td>831</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>835</td></tr>
+<td>833</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 117).</td>
-<td>846</td></tr>
+<td>844</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 114).</td>
-<td>847</td></tr>
+<td>845</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 115).</td>
-<td>848</td></tr>
+<td>846</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 114).</td>
-<td>854</td></tr>
+<td>852</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 114).</td>
-<td>855</td></tr>
+<td>853</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>857</td></tr>
+<td>855</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 106).</td>
-<td>860</td></tr>
+<td>858</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 120).</td>
-<td>867</td></tr></table></div>
+<td>865</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.io.hfile.HFileBlock.java">org/apache/hadoop/hbase/io/hfile/HFileBlock.java</h3>
 <table border="0" class="table table-striped">
@@ -54556,7 +54556,7 @@
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>MethodLength</td>
-<td>Method length is 279 lines (max allowed is 150).</td>
+<td>Method length is 277 lines (max allowed is 150).</td>
 <td>191</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
@@ -54569,97 +54569,97 @@
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 102).</td>
-<td>435</td></tr>
+<td>433</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 6, expected level should be 8.</td>
-<td>465</td></tr>
+<td>463</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
 <td>'{' at column 5 should be on the previous line.</td>
-<td>506</td></tr>
+<td>504</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
 <td>'{' at column 9 should be on the previous line.</td>
-<td>513</td></tr>
+<td>511</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>MethodParamPad</td>
 <td>'(' should be on the previous line.</td>
-<td>519</td></tr>
+<td>517</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>MethodParamPad</td>
 <td>'(' should be on the previous line.</td>
-<td>547</td></tr>
+<td>545</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>'(' is followed by whitespace.</td>
-<td>650</td></tr>
+<td>648</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>'(' is followed by whitespace.</td>
-<td>651</td></tr>
+<td>649</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>'(' is followed by whitespace.</td>
-<td>653</td></tr>
+<td>651</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>')' is preceded with whitespace.</td>
-<td>653</td></tr>
+<td>651</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
 <td>'{' at column 5 should be on the previous line.</td>
-<td>654</td></tr>
+<td>652</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 146).</td>
-<td>662</td></tr>
+<td>660</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>665</td></tr>
+<td>663</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 103).</td>
-<td>861</td></tr>
+<td>859</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 124).</td>
-<td>863</td></tr>
+<td>861</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 121).</td>
-<td>878</td></tr></table></div>
+<td>876</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.mapreduce.HRegionPartitioner.java">org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java</h3>
 <table border="0" class="table table-striped">
@@ -107684,535 +107684,535 @@
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>')' is preceded with whitespace.</td>
-<td>1058</td></tr>
+<td>1057</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1155</td></tr>
+<td>1154</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1160</td></tr>
+<td>1159</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1210</td></tr>
+<td>1209</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1214</td></tr>
+<td>1213</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 118).</td>
-<td>1220</td></tr>
+<td>1219</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 113).</td>
-<td>1221</td></tr>
+<td>1220</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>1229</td></tr>
+<td>1228</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 113).</td>
-<td>1230</td></tr>
+<td>1229</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>1232</td></tr>
+<td>1231</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1246</td></tr>
+<td>1245</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1418</td></tr>
+<td>1417</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1419</td></tr>
+<td>1418</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1421</td></tr>
+<td>1420</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 102).</td>
-<td>1438</td></tr>
+<td>1437</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1439</td></tr>
+<td>1438</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>1443</td></tr>
+<td>1442</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1454</td></tr>
+<td>1453</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 109).</td>
-<td>1473</td></tr>
+<td>1472</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1474</td></tr>
+<td>1473</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>1503</td></tr>
+<td>1502</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 123).</td>
-<td>1506</td></tr>
+<td>1505</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 102).</td>
-<td>1517</td></tr>
+<td>1516</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>')' is preceded with whitespace.</td>
-<td>1581</td></tr>
+<td>1580</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'catch' child has incorrect indentation level 8, expected level should be 6.</td>
-<td>1856</td></tr>
+<td>1855</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1866</td></tr>
+<td>1865</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>1867</td></tr>
+<td>1866</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>1870</td></tr>
+<td>1869</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'if' has incorrect indentation level 7, expected level should be 6.</td>
-<td>1905</td></tr>
+<td>1904</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'if' child has incorrect indentation level 9, expected level should be 8.</td>
-<td>1908</td></tr>
+<td>1907</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'if rcurly' has incorrect indentation level 7, expected level should be 6.</td>
-<td>1909</td></tr>
+<td>1908</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>')' is preceded with whitespace.</td>
-<td>1961</td></tr>
+<td>1960</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 106).</td>
-<td>1977</td></tr>
+<td>1976</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>2038</td></tr>
+<td>2037</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>')' is preceded with whitespace.</td>
-<td>2228</td></tr>
+<td>2227</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>')' is preceded with whitespace.</td>
-<td>2249</td></tr>
+<td>2248</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>2297</td></tr>
+<td>2296</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>2398</td></tr>
+<td>2397</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>MethodLength</td>
 <td>Method length is 232 lines (max allowed is 150).</td>
-<td>2419</td></tr>
+<td>2418</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'throws' has incorrect indentation level 2, expected level should be 4.</td>
-<td>2420</td></tr>
+<td>2419</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>2422</td></tr>
+<td>2421</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>2494</td></tr>
+<td>2493</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>2657</td></tr>
+<td>2656</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>2692</td></tr>
+<td>2691</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>2882</td></tr>
+<td>2881</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>2900</td></tr>
+<td>2899</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 103).</td>
-<td>3077</td></tr>
+<td>3076</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>3121</td></tr>
+<td>3120</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>3196</td></tr>
+<td>3195</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 107).</td>
-<td>3196</td></tr>
+<td>3195</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>3231</td></tr>
+<td>3230</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>')' is preceded with whitespace.</td>
-<td>3304</td></tr>
+<td>3303</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>3320</td></tr>
+<td>3319</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>3368</td></tr>
+<td>3367</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>3427</td></tr>
+<td>3426</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>3430</td></tr>
+<td>3429</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>3534</td></tr>
+<td>3533</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>3535</td></tr>
+<td>3534</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'method def' child has incorrect indentation level 6, expected level should be 4.</td>
-<td>3613</td></tr>
+<td>3612</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>3654</td></tr>
+<td>3653</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>3655</td></tr>
+<td>3654</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'.' has incorrect indentation level 10, expected level should be 12.</td>
-<td>3681</td></tr>
+<td>3680</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
 <td>Indentation</td>
 <td>'KeeperException' has incorrect indentation level 2, expected level should be 4.</td>
-<td>3712</td></tr>
+<td>3711</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>'(' is followed by whitespace.</td>
-<td>3942</td></tr>
+<td>3941</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>'(' is followed by whitespace.</td>
-<td>3943</td></tr>
+<td>3942</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>whitespace</td>
 <td>ParenPad</td>
 <td>'(' is followed by whitespace.</td>
-<td>3944</td></tr>
+<td>3943</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 137).</td>
-<td>4164</td></tr>
+<td>4163</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>design</td>
 <td>VisibilityModifier</td>
 <td>Variable 'errorCount' must be private and have accessor methods.</td>
-<td>4203</td></tr>
+<td>4202</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>4266</td></tr>
+<td>4265</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 101).</td>
-<td>4375</td></tr>
+<td>4374</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>4436</td></tr>
+<td>4435</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>4507</td></tr>
+<td>4506</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 115).</td>
-<td>4816</td></tr>
+<td>4815</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 108).</td>
-<td>4859</td></tr>
+<td>4858</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 107).</td>
-<td>4864</td></tr>
+<td>4863</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>4865</td></tr>
+<td>4864</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 111).</td>
-<td>4872</td></tr>
+<td>4871</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 116).</td>
-<td>4877</td></tr>
+<td>4876</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 107).</td>
-<td>4884</td></tr>
+<td>4883</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 106).</td>
-<td>4886</td></tr>
+<td>4885</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 113).</td>
-<td>4889</td></tr>
+<td>4888</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 145).</td>
-<td>4891</td></tr>
+<td>4890</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 104).</td>
-<td>4892</td></tr>
+<td>4891</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 179).</td>
-<td>4893</td></tr>
+<td>4892</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>LineLength</td>
 <td>Line is longer than 100 characters (found 113).</td>
-<td>4895</td></tr>
+<td>4894</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>4920</td></tr>
+<td>4919</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>4921</td></tr>
+<td>4920</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>blocks</td>
 <td>LeftCurly</td>
 <td>'{' at column 39 should have line break after.</td>
-<td>4937</td></tr>
+<td>4936</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
 <td>MethodLength</td>
 <td>Method length is 229 lines (max allowed is 150).</td>
-<td>4947</td></tr></table></div>
+<td>4946</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.util.HBaseFsckRepair.java">org/apache/hadoop/hbase/util/HBaseFsckRepair.java</h3>
 <table border="0" class="table table-striped">
@@ -115011,7 +115011,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/coc.html
----------------------------------------------------------------------
diff --git a/coc.html b/coc.html
index 7015842..a7d4493 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Code of Conduct Policy
@@ -385,7 +385,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/dependencies.html
----------------------------------------------------------------------
diff --git a/dependencies.html b/dependencies.html
index 1f2309e..7c585aa 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependencies</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -450,7 +450,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/dependency-convergence.html
----------------------------------------------------------------------
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 5ccbe2a..69dd69e 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Reactor Dependency Convergence</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -680,7 +680,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/dependency-info.html
----------------------------------------------------------------------
diff --git a/dependency-info.html b/dependency-info.html
index 8c9a99b..cb723c6 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Dependency Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -323,7 +323,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/dependency-management.html
----------------------------------------------------------------------
diff --git a/dependency-management.html b/dependency-management.html
index 9eb420c..5c34a11 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependency Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -1009,7 +1009,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 53538af..a9fff1f 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3831,7 +3831,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Tue Dec 11 14:44:59 UTC 2018"</code></td>
+<td class="colLast"><code>"Wed Dec 12 14:43:53 UTC 2018"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
@@ -3845,7 +3845,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#srcChecksum">srcChecksum</a></code></td>
-<td class="colLast"><code>"f3af750f342fb0851739ef407054f5df"</code></td>
+<td class="colLast"><code>"67847fa5f6c0734bf527ab6a4205389e"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.url">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
index 37d3b73..243f384 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.389">HFile.CachingBlockReader</a></pre>
+<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.387">HFile.CachingBlockReader</a></pre>
 <div class="block">An abstraction used by the block index.
  Implementations will check cache for any asked-for block and return cached block if found.
  Otherwise, after reading from fs, will try and put block into cache before returning.</div>
@@ -175,7 +175,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>readBlock</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.407">readBlock</a>(long&nbsp;offset,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.405">readBlock</a>(long&nbsp;offset,
                      long&nbsp;onDiskBlockSize,
                      boolean&nbsp;cacheBlock,
                      boolean&nbsp;pread,
@@ -212,7 +212,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>returnBlock</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.417">returnBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html#line.415">returnBlock</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;block)</pre>
 <div class="block">Return the given block back to the cache, if it was obtained from cache.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
index 6f0aa9c..075c84b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.663">HFile.FileInfo</a>
+<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.661">HFile.FileInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</pre>
 <div class="block">Metadata for this file. Conjured by the writer. Read in by the reader.</div>
@@ -376,7 +376,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>RESERVED_PREFIX</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.664">RESERVED_PREFIX</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.662">RESERVED_PREFIX</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.hfile.HFile.FileInfo.RESERVED_PREFIX">Constant Field Values</a></dd>
@@ -389,7 +389,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>RESERVED_PREFIX_BYTES</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.665">RESERVED_PREFIX_BYTES</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.663">RESERVED_PREFIX_BYTES</a></pre>
 </li>
 </ul>
 <a name="LASTKEY">
@@ -398,7 +398,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>LASTKEY</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.666">LASTKEY</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.664">LASTKEY</a></pre>
 </li>
 </ul>
 <a name="AVG_KEY_LEN">
@@ -407,7 +407,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>AVG_KEY_LEN</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.667">AVG_KEY_LEN</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.665">AVG_KEY_LEN</a></pre>
 </li>
 </ul>
 <a name="AVG_VALUE_LEN">
@@ -416,7 +416,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>AVG_VALUE_LEN</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.668">AVG_VALUE_LEN</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.666">AVG_VALUE_LEN</a></pre>
 </li>
 </ul>
 <a name="CREATE_TIME_TS">
@@ -425,7 +425,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>CREATE_TIME_TS</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.669">CREATE_TIME_TS</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.667">CREATE_TIME_TS</a></pre>
 </li>
 </ul>
 <a name="COMPARATOR">
@@ -434,7 +434,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>COMPARATOR</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.670">COMPARATOR</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.668">COMPARATOR</a></pre>
 </li>
 </ul>
 <a name="TAGS_COMPRESSED">
@@ -443,7 +443,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>TAGS_COMPRESSED</h4>
-<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.671">TAGS_COMPRESSED</a></pre>
+<pre>static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.669">TAGS_COMPRESSED</a></pre>
 </li>
 </ul>
 <a name="MAX_TAGS_LEN">
@@ -452,7 +452,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>MAX_TAGS_LEN</h4>
-<pre>public static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.672">MAX_TAGS_LEN</a></pre>
+<pre>public static final&nbsp;byte[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.670">MAX_TAGS_LEN</a></pre>
 </li>
 </ul>
 <a name="map">
@@ -461,7 +461,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockListLast">
 <li class="blockList">
 <h4>map</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.673">map</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.671">map</a></pre>
 </li>
 </ul>
 </li>
@@ -478,7 +478,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileInfo</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.675">FileInfo</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.673">FileInfo</a>()</pre>
 </li>
 </ul>
 </li>
@@ -495,7 +495,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>append</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.690">append</a>(byte[]&nbsp;k,
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.688">append</a>(byte[]&nbsp;k,
                              byte[]&nbsp;v,
                              boolean&nbsp;checkPrefix)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -520,7 +520,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>clear</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.704">clear</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.702">clear</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#clear--" title="class or interface in java.util">clear</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -533,7 +533,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>comparator</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;? super byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.709">comparator</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true" title="class or interface in java.util">Comparator</a>&lt;? super byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.707">comparator</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true#comparator--" title="class or interface in java.util">comparator</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -546,7 +546,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>containsKey</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.714">containsKey</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.712">containsKey</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#containsKey-java.lang.Object-" title="class or interface in java.util">containsKey</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -559,7 +559,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>containsValue</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.719">containsValue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;value)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.717">containsValue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;value)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#containsValue-java.lang.Object-" title="class or interface in java.util">containsValue</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -572,7 +572,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>entrySet</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;byte[],byte[]&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.724">entrySet</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;byte[],byte[]&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.722">entrySet</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#entrySet--" title="class or interface in java.util">entrySet</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -587,7 +587,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.729">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.727">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.util">equals</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -602,7 +602,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>firstKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.734">firstKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.732">firstKey</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true#firstKey--" title="class or interface in java.util">firstKey</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -615,7 +615,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>get</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.739">get</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.737">get</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#get-java.lang.Object-" title="class or interface in java.util">get</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -628,7 +628,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.744">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.742">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#hashCode--" title="class or interface in java.util">hashCode</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -643,7 +643,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>headMap</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.749">headMap</a>(byte[]&nbsp;toKey)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.747">headMap</a>(byte[]&nbsp;toKey)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true#headMap-K-" title="class or interface in java.util">headMap</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -656,7 +656,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>isEmpty</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.754">isEmpty</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.752">isEmpty</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#isEmpty--" title="class or interface in java.util">isEmpty</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -669,7 +669,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>keySet</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.759">keySet</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.757">keySet</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#keySet--" title="class or interface in java.util">keySet</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -684,7 +684,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>lastKey</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.764">lastKey</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.762">lastKey</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true#lastKey--" title="class or interface in java.util">lastKey</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -697,7 +697,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>put</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.769">put</a>(byte[]&nbsp;key,
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.767">put</a>(byte[]&nbsp;key,
                   byte[]&nbsp;value)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -711,7 +711,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>putAll</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.774">putAll</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;? extends byte[],? extends byte[]&gt;&nbsp;m)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.772">putAll</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;? extends byte[],? extends byte[]&gt;&nbsp;m)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#putAll-java.util.Map-" title="class or interface in java.util">putAll</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -724,7 +724,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>remove</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.779">remove</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.777">remove</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;key)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#remove-java.lang.Object-" title="class or interface in java.util">remove</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -737,7 +737,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>size</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.784">size</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.782">size</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#size--" title="class or interface in java.util">size</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -750,7 +750,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>subMap</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.789">subMap</a>(byte[]&nbsp;fromKey,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.787">subMap</a>(byte[]&nbsp;fromKey,
                                        byte[]&nbsp;toKey)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -764,7 +764,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>tailMap</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.794">tailMap</a>(byte[]&nbsp;fromKey)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.792">tailMap</a>(byte[]&nbsp;fromKey)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true#tailMap-K-" title="class or interface in java.util">tailMap</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -777,7 +777,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.799">values</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.797">values</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true#values--" title="class or interface in java.util">values</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;</code></dd>
@@ -792,7 +792,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>write</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.810">write</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.808">write</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io">DataOutputStream</a>&nbsp;out)
     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Write out this instance on the passed in <code>out</code> stream.
  We write it as a protobuf.</div>
@@ -812,7 +812,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>read</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.829">read</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.827">read</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Populate this instance with what we find on the passed in <code>in</code> stream.
  Can deserialize protobuf of old Writables format.</div>
@@ -832,7 +832,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockList">
 <li class="blockList">
 <h4>parseWritable</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.859">parseWritable</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.857">parseWritable</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io">DataInputStream</a>&nbsp;in)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a key and a value of
  a byte [].  The old map format had a byte before each entry that held a code which was short for the key or
@@ -849,7 +849,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMa
 <ul class="blockListLast">
 <li class="blockList">
 <h4>parsePB</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.878">parsePB</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto&nbsp;fip)</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#line.876">parsePB</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto&nbsp;fip)</pre>
 <div class="block">Fill our map with content of the pb we read off disk</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>


[38/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
index 73c9bec..7730b9c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.421">HFile.Reader</a>
+<pre>public static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.419">HFile.Reader</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.CachingBlockReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.CachingBlockReader</a></pre>
 <div class="block">An interface used by clients to open and iterate an <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>.</div>
 </li>
@@ -311,7 +311,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.427">getName</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.425">getName</a>()</pre>
 <div class="block">Returns this reader's "name". Usually the last component of the path.
  Needs to be constant as the file is being moved to support caching on
  write.</div>
@@ -323,7 +323,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getComparator</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.429">getComparator</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/CellComparator.html" title="interface in org.apache.hadoop.hbase">CellComparator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.427">getComparator</a>()</pre>
 </li>
 </ul>
 <a name="getScanner-boolean-boolean-boolean-">
@@ -332,7 +332,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.431">getScanner</a>(boolean&nbsp;cacheBlocks,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.429">getScanner</a>(boolean&nbsp;cacheBlocks,
                         boolean&nbsp;pread,
                         boolean&nbsp;isCompaction)</pre>
 </li>
@@ -343,7 +343,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaBlock</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.433">getMetaBlock</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;metaBlockName,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlock</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.431">getMetaBlock</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;metaBlockName,
                         boolean&nbsp;cacheBlock)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -358,7 +358,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>loadFileInfo</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.435">loadFileInfo</a>()
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.433">loadFileInfo</a>()
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -372,7 +372,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastKey</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.437">getLastKey</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.435">getLastKey</a>()</pre>
 </li>
 </ul>
 <a name="midKey--">
@@ -381,7 +381,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>midKey</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.439">midKey</a>()
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.437">midKey</a>()
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -395,7 +395,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>length</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.441">length</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.439">length</a>()</pre>
 </li>
 </ul>
 <a name="getEntries--">
@@ -404,7 +404,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getEntries</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.443">getEntries</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.441">getEntries</a>()</pre>
 </li>
 </ul>
 <a name="getFirstKey--">
@@ -413,7 +413,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getFirstKey</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.445">getFirstKey</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.443">getFirstKey</a>()</pre>
 </li>
 </ul>
 <a name="indexSize--">
@@ -422,7 +422,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>indexSize</h4>
-<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.447">indexSize</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.445">indexSize</a>()</pre>
 </li>
 </ul>
 <a name="getFirstRowKey--">
@@ -431,7 +431,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getFirstRowKey</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.449">getFirstRowKey</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.447">getFirstRowKey</a>()</pre>
 </li>
 </ul>
 <a name="getLastRowKey--">
@@ -440,7 +440,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastRowKey</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.451">getLastRowKey</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;byte[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.449">getLastRowKey</a>()</pre>
 </li>
 </ul>
 <a name="getTrailer--">
@@ -449,7 +449,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getTrailer</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.453">getTrailer</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.html" title="class in org.apache.hadoop.hbase.io.hfile">FixedFileTrailer</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.451">getTrailer</a>()</pre>
 </li>
 </ul>
 <a name="getDataBlockIndexReader--">
@@ -458,7 +458,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockIndexReader</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.455">getDataBlockIndexReader</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.453">getDataBlockIndexReader</a>()</pre>
 </li>
 </ul>
 <a name="getScanner-boolean-boolean-">
@@ -467,7 +467,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getScanner</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.457">getScanner</a>(boolean&nbsp;cacheBlocks,
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileScanner.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileScanner</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.455">getScanner</a>(boolean&nbsp;cacheBlocks,
                         boolean&nbsp;pread)</pre>
 </li>
 </ul>
@@ -477,7 +477,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getCompressionAlgorithm</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.459">getCompressionAlgorithm</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.457">getCompressionAlgorithm</a>()</pre>
 </li>
 </ul>
 <a name="getGeneralBloomFilterMetadata--">
@@ -486,7 +486,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getGeneralBloomFilterMetadata</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.466">getGeneralBloomFilterMetadata</a>()
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.464">getGeneralBloomFilterMetadata</a>()
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Retrieves general Bloom filter metadata as appropriate for each
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> version.
@@ -503,7 +503,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getDeleteBloomFilterMetadata</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.473">getDeleteBloomFilterMetadata</a>()
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.471">getDeleteBloomFilterMetadata</a>()
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Retrieves delete family Bloom filter metadata as appropriate for each
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>  version.
@@ -520,7 +520,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getPath</h4>
-<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.475">getPath</a>()</pre>
+<pre>org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.473">getPath</a>()</pre>
 </li>
 </ul>
 <a name="close-boolean-">
@@ -529,7 +529,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.478">close</a>(boolean&nbsp;evictOnClose)
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.476">close</a>(boolean&nbsp;evictOnClose)
     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Close method with optional evictOnClose</div>
 <dl>
@@ -544,7 +544,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getDataBlockEncoding</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.480">getDataBlockEncoding</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.478">getDataBlockEncoding</a>()</pre>
 </li>
 </ul>
 <a name="hasMVCCInfo--">
@@ -553,7 +553,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>hasMVCCInfo</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.482">hasMVCCInfo</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.480">hasMVCCInfo</a>()</pre>
 </li>
 </ul>
 <a name="getFileContext--">
@@ -562,7 +562,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileContext</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.487">getFileContext</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.485">getFileContext</a>()</pre>
 <div class="block">Return the file context of the HFile this reader belongs to</div>
 </li>
 </ul>
@@ -572,7 +572,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>isPrimaryReplicaReader</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.489">isPrimaryReplicaReader</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.487">isPrimaryReplicaReader</a>()</pre>
 </li>
 </ul>
 <a name="shouldIncludeMemStoreTS--">
@@ -581,7 +581,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldIncludeMemStoreTS</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.491">shouldIncludeMemStoreTS</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.489">shouldIncludeMemStoreTS</a>()</pre>
 </li>
 </ul>
 <a name="isDecodeMemStoreTS--">
@@ -590,7 +590,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>isDecodeMemStoreTS</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.493">isDecodeMemStoreTS</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.491">isDecodeMemStoreTS</a>()</pre>
 </li>
 </ul>
 <a name="getEffectiveEncodingInCache-boolean-">
@@ -599,7 +599,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getEffectiveEncodingInCache</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.495">getEffectiveEncodingInCache</a>(boolean&nbsp;isCompaction)</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.493">getEffectiveEncodingInCache</a>(boolean&nbsp;isCompaction)</pre>
 </li>
 </ul>
 <a name="getUncachedBlockReader--">
@@ -608,7 +608,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>getUncachedBlockReader</h4>
-<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.498">getUncachedBlockReader</a>()</pre>
+<pre><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.FSReader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFileBlock.FSReader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.496">getUncachedBlockReader</a>()</pre>
 </li>
 </ul>
 <a name="prefetchComplete--">
@@ -617,7 +617,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>prefetchComplete</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.501">prefetchComplete</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.499">prefetchComplete</a>()</pre>
 </li>
 </ul>
 <a name="unbufferStream--">
@@ -626,7 +626,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>unbufferStream</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.507">unbufferStream</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.Reader.html#line.505">unbufferStream</a>()</pre>
 <div class="block">To close the stream's socket. Note: This can be concurrently called from multiple threads and
  implementation should take care of thread safety.</div>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
index aed8dbe..617db09 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFile.html
@@ -779,7 +779,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getWriterFactory</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.367">getWriterFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.WriterFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.WriterFactory</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.365">getWriterFactory</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                          <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 <div class="block">Returns the factory to be used to create <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> writers</div>
 </li>
@@ -790,7 +790,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>openReader</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.525">openReader</a>(org.apache.hadoop.fs.Path&nbsp;path,
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.523">openReader</a>(org.apache.hadoop.fs.Path&nbsp;path,
                                        <a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;fsdis,
                                        long&nbsp;size,
                                        <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf,
@@ -821,7 +821,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.565">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.563">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                         org.apache.hadoop.fs.Path&nbsp;path,
                                         <a href="../../../../../../org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.html" title="class in org.apache.hadoop.hbase.io">FSDataInputStreamWrapper</a>&nbsp;fsdis,
                                         long&nbsp;size,
@@ -854,7 +854,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.590">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.588">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                         org.apache.hadoop.fs.Path&nbsp;path,
                                         org.apache.hadoop.conf.Configuration&nbsp;conf)
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -877,7 +877,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReader</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.607">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.605">createReader</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                         org.apache.hadoop.fs.Path&nbsp;path,
                                         <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf,
                                         boolean&nbsp;primaryReplicaReader,
@@ -904,7 +904,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createReaderFromStream</h4>
-<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.622">createReaderFromStream</a>(org.apache.hadoop.fs.Path&nbsp;path,
+<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.Reader.html" title="interface in org.apache.hadoop.hbase.io.hfile">HFile.Reader</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.620">createReaderFromStream</a>(org.apache.hadoop.fs.Path&nbsp;path,
                                            org.apache.hadoop.fs.FSDataInputStream&nbsp;fsdis,
                                            long&nbsp;size,
                                            <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf,
@@ -926,7 +926,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isHFileFormat</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.635">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.633">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                     org.apache.hadoop.fs.Path&nbsp;path)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns true if the specified file has a valid HFile Trailer.</div>
@@ -947,7 +947,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isHFileFormat</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.646">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.644">isHFileFormat</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                     org.apache.hadoop.fs.FileStatus&nbsp;fileStatus)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns true if the specified file has a valid HFile Trailer.</div>
@@ -968,7 +968,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isReservedFileInfoKey</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.887">isReservedFileInfoKey</a>(byte[]&nbsp;key)</pre>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.885">isReservedFileInfoKey</a>(byte[]&nbsp;key)</pre>
 <div class="block">Return true if the given file info key is reserved for internal use.</div>
 </li>
 </ul>
@@ -978,7 +978,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getSupportedCompressionAlgorithms</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.903">getSupportedCompressionAlgorithms</a>()</pre>
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.901">getSupportedCompressionAlgorithms</a>()</pre>
 <div class="block">Get names of supported compression algorithms. The names are acceptable by
  HFile.Writer.</div>
 <dl>
@@ -999,7 +999,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>longToInt</h4>
-<pre>static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.912">longToInt</a>(long&nbsp;l)</pre>
+<pre>static&nbsp;int&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.910">longToInt</a>(long&nbsp;l)</pre>
 </li>
 </ul>
 <a name="getStoreFiles-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-">
@@ -1008,7 +1008,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFiles</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.927">getStoreFiles</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.925">getStoreFiles</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                      org.apache.hadoop.fs.Path&nbsp;regionDir)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Returns all HFiles belonging to the given region directory. Could return an
@@ -1030,7 +1030,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkFormatVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.954">checkFormatVersion</a>(int&nbsp;version)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.952">checkFormatVersion</a>(int&nbsp;version)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/IllegalArgumentException.html?is-external=true" title="class or interface in java.lang">IllegalArgumentException</a></pre>
 <div class="block">Checks the given <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a> format version, and throws an exception if
  invalid. Note that if the version number comes from an input file and has
@@ -1050,7 +1050,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkHFileVersion</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.964">checkHFileVersion</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.962">checkHFileVersion</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
 </li>
 </ul>
 <a name="main-java.lang.String:A-">
@@ -1059,7 +1059,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>main</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.975">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/HFile.html#line.973">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
index 58757b6..a240411 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
@@ -107,7 +107,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.491">HFileOutputFormat2.WriterLength</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.489">HFileOutputFormat2.WriterLength</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -188,7 +188,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>written</h4>
-<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html#line.492">written</a></pre>
+<pre>long <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html#line.490">written</a></pre>
 </li>
 </ul>
 <a name="writer">
@@ -197,7 +197,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writer</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html#line.493">writer</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html#line.491">writer</a></pre>
 </li>
 </ul>
 </li>
@@ -214,7 +214,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WriterLength</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html#line.491">WriterLength</a>()</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html#line.489">WriterLength</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 60a9095..419cd9e 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -649,7 +649,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>compressionDetails</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.898">compressionDetails</a></pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.896">compressionDetails</a></pre>
 <div class="block">Serialize column family to compression algorithm map to configuration.
  Invoked while configuring the MR job for incremental load.</div>
 </li>
@@ -660,7 +660,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>blockSizeDetails</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.914">blockSizeDetails</a></pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.912">blockSizeDetails</a></pre>
 <div class="block">Serialize column family to block size map to configuration. Invoked while
  configuring the MR job for incremental load.</div>
 </li>
@@ -671,7 +671,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>bloomTypeDetails</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.930">bloomTypeDetails</a></pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.928">bloomTypeDetails</a></pre>
 <div class="block">Serialize column family to bloom type map to configuration. Invoked while
  configuring the MR job for incremental load.</div>
 </li>
@@ -682,7 +682,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>bloomParamDetails</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.951">bloomParamDetails</a></pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.949">bloomParamDetails</a></pre>
 <div class="block">Serialize column family to bloom param map to configuration. Invoked while
  configuring the MR job for incremental load.</div>
 </li>
@@ -693,7 +693,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>dataBlockEncodingDetails</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.974">dataBlockEncodingDetails</a></pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.972">dataBlockEncodingDetails</a></pre>
 <div class="block">Serialize column family to data block encoding map to configuration.
  Invoked while configuring the MR job for incremental load.</div>
 </li>
@@ -782,7 +782,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configureStoragePolicy</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.476">configureStoragePolicy</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.474">configureStoragePolicy</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                    org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                    byte[]&nbsp;tableAndFamily,
                                    org.apache.hadoop.fs.Path&nbsp;cfPath)</pre>
@@ -795,7 +795,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionStartKeys</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.500">getRegionStartKeys</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&gt;&nbsp;regionLocators,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.498">getRegionStartKeys</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&gt;&nbsp;regionLocators,
                                                                boolean&nbsp;writeMultipleTables)
                                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Return the start keys of all of the regions in this table,
@@ -812,7 +812,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>writePartitions</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.532">writePartitions</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.530">writePartitions</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                     org.apache.hadoop.fs.Path&nbsp;partitionsPath,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>&gt;&nbsp;startKeys,
                                     boolean&nbsp;writeMultipleTables)
@@ -831,7 +831,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configureIncrementalLoad</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.585">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.583">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;table,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;regionLocator)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -859,7 +859,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configureIncrementalLoad</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.604">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.602">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;tableDescriptor,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;regionLocator)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -887,7 +887,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configureIncrementalLoad</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.611">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.609">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html" title="class in org.apache.hadoop.hbase.mapreduce">HFileOutputFormat2.TableInfo</a>&gt;&nbsp;multiTableInfo,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends org.apache.hadoop.mapreduce.OutputFormat&lt;?,?&gt;&gt;&nbsp;cls)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -903,7 +903,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configureIncrementalLoadMap</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.690">configureIncrementalLoadMap</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.688">configureIncrementalLoadMap</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                                <a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;tableDescriptor)
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -918,7 +918,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>createFamilyCompressionMap</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.727">createFamilyCompressionMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.725">createFamilyCompressionMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Runs inside the task to deserialize column family to compression algorithm
  map from the configuration.</div>
 <dl>
@@ -935,7 +935,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>createFamilyBloomTypeMap</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver">BloomType</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.747">createFamilyBloomTypeMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver">BloomType</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.745">createFamilyBloomTypeMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Runs inside the task to deserialize column family to bloom filter type
  map from the configuration.</div>
 <dl>
@@ -952,7 +952,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>createFamilyBloomParamMap</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.766">createFamilyBloomParamMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.764">createFamilyBloomParamMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Runs inside the task to deserialize column family to bloom filter param
  map from the configuration.</div>
 <dl>
@@ -969,7 +969,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>createFamilyBlockSizeMap</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.779">createFamilyBlockSizeMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.777">createFamilyBlockSizeMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Runs inside the task to deserialize column family to block size
  map from the configuration.</div>
 <dl>
@@ -986,7 +986,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>createFamilyDataBlockEncodingMap</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.799">createFamilyDataBlockEncodingMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="../../../../../org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.html" title="enum in org.apache.hadoop.hbase.io.encoding">DataBlockEncoding</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.797">createFamilyDataBlockEncodingMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Runs inside the task to deserialize column family to data block encoding
  type map from the configuration.</div>
 <dl>
@@ -1004,7 +1004,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>createFamilyConfValueMap</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.818">createFamilyConfValueMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.816">createFamilyConfValueMap</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;confName)</pre>
 <div class="block">Run inside the task to deserialize column family to given conf value map.</div>
 <dl>
@@ -1022,7 +1022,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configurePartitioner</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.842">configurePartitioner</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.840">configurePartitioner</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a>&gt;&nbsp;splitPoints,
                                  boolean&nbsp;writeMultipleTables)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1040,7 +1040,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>serializeColumnFamilyAttribute</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.863">serializeColumnFamilyAttribute</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fn,
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.861">serializeColumnFamilyAttribute</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;fn,
                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt;&nbsp;allTables)
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/UnsupportedEncodingException.html?is-external=true" title="class or interface in java.io">UnsupportedEncodingException</a></pre>
 <dl>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
index 864bbb2..937f09e 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2108">HBaseFsck.CheckRegionConsistencyWorkItem</a>
+<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.html#line.2107">HBaseFsck.CheckRegionConsistencyWorkItem</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;</pre>
 </li>
@@ -211,7 +211,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockList">
 <li class="blockList">
 <h4>key</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2109">key</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2108">key</a></pre>
 </li>
 </ul>
 <a name="hbi">
@@ -220,7 +220,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>hbi</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2110">hbi</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2109">hbi</a></pre>
 </li>
 </ul>
 </li>
@@ -237,7 +237,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CheckRegionConsistencyWorkItem</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2112">CheckRegionConsistencyWorkItem</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2111">CheckRegionConsistencyWorkItem</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;key,
                                <a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html" title="class in org.apache.hadoop.hbase.util">HBaseFsck.HbckInfo</a>&nbsp;hbi)</pre>
 </li>
 </ul>
@@ -255,7 +255,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurre
 <ul class="blockListLast">
 <li class="blockList">
 <h4>call</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2118">call</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html#line.2117">call</a>()
           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>


[21/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.CheckRegionConsistencyWorkItem.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<sp

<TRUNCATED>

[40/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 5ee521a..68dec5a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -422,572 +422,570 @@
 <span class="sourceLineNo">414</span>        DataBlockEncoding encoding = overriddenEncoding;<a name="line.414"></a>
 <span class="sourceLineNo">415</span>        encoding = encoding == null ? datablockEncodingMap.get(tableAndFamily) : encoding;<a name="line.415"></a>
 <span class="sourceLineNo">416</span>        encoding = encoding == null ? DataBlockEncoding.NONE : encoding;<a name="line.416"></a>
-<span class="sourceLineNo">417</span>        Configuration tempConf = new Configuration(conf);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.419"></a>
-<span class="sourceLineNo">420</span>                                    .withCompression(compression)<a name="line.420"></a>
-<span class="sourceLineNo">421</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.421"></a>
-<span class="sourceLineNo">422</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>                                    .withBlockSize(blockSize);<a name="line.423"></a>
-<span class="sourceLineNo">424</span><a name="line.424"></a>
-<span class="sourceLineNo">425</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>          contextBuilder.withIncludesTags(true);<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        }<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        if (null == favoredNodes) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          wl.writer =<a name="line.432"></a>
-<span class="sourceLineNo">433</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), fs)<a name="line.433"></a>
-<span class="sourceLineNo">434</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.435"></a>
-<span class="sourceLineNo">436</span>        } else {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>          wl.writer =<a name="line.437"></a>
-<span class="sourceLineNo">438</span>              new StoreFileWriter.Builder(conf, new CacheConfig(tempConf), new HFileSystem(fs))<a name="line.438"></a>
-<span class="sourceLineNo">439</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.439"></a>
-<span class="sourceLineNo">440</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.440"></a>
-<span class="sourceLineNo">441</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        }<a name="line.442"></a>
-<span class="sourceLineNo">443</span><a name="line.443"></a>
-<span class="sourceLineNo">444</span>        this.writers.put(tableAndFamily, wl);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        return wl;<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>        if (w != null) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.450"></a>
-<span class="sourceLineNo">451</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.451"></a>
-<span class="sourceLineNo">452</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.452"></a>
-<span class="sourceLineNo">453</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>              Bytes.toBytes(true));<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.456"></a>
-<span class="sourceLineNo">457</span>              Bytes.toBytes(compactionExclude));<a name="line.457"></a>
-<span class="sourceLineNo">458</span>          w.appendTrackedTimestampsToMetadata();<a name="line.458"></a>
-<span class="sourceLineNo">459</span>          w.close();<a name="line.459"></a>
-<span class="sourceLineNo">460</span>        }<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      }<a name="line.461"></a>
-<span class="sourceLineNo">462</span><a name="line.462"></a>
-<span class="sourceLineNo">463</span>      @Override<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      public void close(TaskAttemptContext c)<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      throws IOException, InterruptedException {<a name="line.465"></a>
-<span class="sourceLineNo">466</span>        for (WriterLength wl: this.writers.values()) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          close(wl.writer);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>        }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    };<a name="line.470"></a>
-<span class="sourceLineNo">471</span>  }<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>  /**<a name="line.473"></a>
-<span class="sourceLineNo">474</span>   * Configure block storage policy for CF after the directory is created.<a name="line.474"></a>
-<span class="sourceLineNo">475</span>   */<a name="line.475"></a>
-<span class="sourceLineNo">476</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      return;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span><a name="line.481"></a>
-<span class="sourceLineNo">482</span>    String policy =<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.483"></a>
-<span class="sourceLineNo">484</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.485"></a>
-<span class="sourceLineNo">486</span>  }<a name="line.486"></a>
-<span class="sourceLineNo">487</span><a name="line.487"></a>
-<span class="sourceLineNo">488</span>  /*<a name="line.488"></a>
-<span class="sourceLineNo">489</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.489"></a>
-<span class="sourceLineNo">490</span>   */<a name="line.490"></a>
-<span class="sourceLineNo">491</span>  static class WriterLength {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    long written = 0;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    StoreFileWriter writer = null;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>  }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * Return the start keys of all of the regions in this table,<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * as a list of ImmutableBytesWritable.<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   */<a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.500"></a>
-<span class="sourceLineNo">501</span>                                                                 boolean writeMultipleTables)<a name="line.501"></a>
-<span class="sourceLineNo">502</span>          throws IOException {<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      TableName tableName = regionLocator.getName();<a name="line.507"></a>
-<span class="sourceLineNo">508</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      for (byte[] byteKey : byteKeys) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.511"></a>
-<span class="sourceLineNo">512</span>        if (writeMultipleTables)<a name="line.512"></a>
-<span class="sourceLineNo">513</span>        {<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          //MultiTableHFileOutputFormat use case<a name="line.514"></a>
-<span class="sourceLineNo">515</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        }<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        if (LOG.isDebugEnabled()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.518"></a>
-<span class="sourceLineNo">519</span>                  (fullKey) + "]");<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      }<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    }<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    return ret;<a name="line.524"></a>
-<span class="sourceLineNo">525</span>  }<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>  /**<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.528"></a>
-<span class="sourceLineNo">529</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   */<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  @SuppressWarnings("deprecation")<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.532"></a>
-<span class="sourceLineNo">533</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    if (startKeys.isEmpty()) {<a name="line.535"></a>
-<span class="sourceLineNo">536</span>      throw new IllegalArgumentException("No regions passed");<a name="line.536"></a>
-<span class="sourceLineNo">537</span>    }<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>    // We're generating a list of split points, and we don't ever<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    // empty reducer with index 0<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    ImmutableBytesWritable first = sorted.first();<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    if (writeMultipleTables) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              ().get()));<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    }<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      throw new IllegalArgumentException(<a name="line.550"></a>
-<span class="sourceLineNo">551</span>          "First region of table should have empty start key. Instead has: "<a name="line.551"></a>
-<span class="sourceLineNo">552</span>          + Bytes.toStringBinary(first.get()));<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    sorted.remove(sorted.first());<a name="line.554"></a>
-<span class="sourceLineNo">555</span><a name="line.555"></a>
-<span class="sourceLineNo">556</span>    // Write the actual file<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      NullWritable.class);<a name="line.560"></a>
-<span class="sourceLineNo">561</span><a name="line.561"></a>
-<span class="sourceLineNo">562</span>    try {<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.563"></a>
-<span class="sourceLineNo">564</span>        writer.append(startKey, NullWritable.get());<a name="line.564"></a>
-<span class="sourceLineNo">565</span>      }<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    } finally {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      writer.close();<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  /**<a name="line.571"></a>
-<span class="sourceLineNo">572</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.572"></a>
-<span class="sourceLineNo">573</span>   * table. This<a name="line.573"></a>
-<span class="sourceLineNo">574</span>   * &lt;ul&gt;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.576"></a>
-<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.577"></a>
-<span class="sourceLineNo">578</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.579"></a>
-<span class="sourceLineNo">580</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   * &lt;/ul&gt;<a name="line.581"></a>
-<span class="sourceLineNo">582</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   * running this function.<a name="line.583"></a>
-<span class="sourceLineNo">584</span>   */<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      throws IOException {<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>  }<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>  /**<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * table. This<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   * &lt;ul&gt;<a name="line.593"></a>
-<span class="sourceLineNo">594</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.594"></a>
-<span class="sourceLineNo">595</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.595"></a>
-<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.596"></a>
-<span class="sourceLineNo">597</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.597"></a>
-<span class="sourceLineNo">598</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.598"></a>
-<span class="sourceLineNo">599</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>   * &lt;/ul&gt;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.601"></a>
-<span class="sourceLineNo">602</span>   * running this function.<a name="line.602"></a>
-<span class="sourceLineNo">603</span>   */<a name="line.603"></a>
-<span class="sourceLineNo">604</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      RegionLocator regionLocator) throws IOException {<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.608"></a>
-<span class="sourceLineNo">609</span>  }<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    Configuration conf = job.getConfiguration();<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    job.setOutputFormatClass(cls);<a name="line.616"></a>
-<span class="sourceLineNo">617</span><a name="line.617"></a>
-<span class="sourceLineNo">618</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    }<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    boolean writeMultipleTables = false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      writeMultipleTables = true;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.626"></a>
-<span class="sourceLineNo">627</span>    // sort the incoming values.<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.628"></a>
-<span class="sourceLineNo">629</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>      job.setReducerClass(CellSortReducer.class);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>      job.setReducerClass(PutSortReducer.class);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      job.setReducerClass(TextSortReducer.class);<a name="line.635"></a>
-<span class="sourceLineNo">636</span>    } else {<a name="line.636"></a>
-<span class="sourceLineNo">637</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.637"></a>
-<span class="sourceLineNo">638</span>    }<a name="line.638"></a>
-<span class="sourceLineNo">639</span><a name="line.639"></a>
-<span class="sourceLineNo">640</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.640"></a>
-<span class="sourceLineNo">641</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.641"></a>
-<span class="sourceLineNo">642</span>        CellSerialization.class.getName());<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    }<a name="line.646"></a>
-<span class="sourceLineNo">647</span><a name="line.647"></a>
-<span class="sourceLineNo">648</span>    /* Now get the region start keys for every table required */<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.651"></a>
-<span class="sourceLineNo">652</span><a name="line.652"></a>
-<span class="sourceLineNo">653</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    {<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.655"></a>
-<span class="sourceLineNo">656</span>      String tn = writeMultipleTables?<a name="line.656"></a>
-<span class="sourceLineNo">657</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.657"></a>
-<span class="sourceLineNo">658</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      allTableNames.add(tn);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.662"></a>
-<span class="sourceLineNo">663</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.663"></a>
-<span class="sourceLineNo">664</span>            .toString(tableSeparator)));<a name="line.664"></a>
-<span class="sourceLineNo">665</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>    // Use table's region boundaries for TOP split points.<a name="line.666"></a>
-<span class="sourceLineNo">667</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.667"></a>
-<span class="sourceLineNo">668</span>        "to match current region count for all tables");<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    job.setNumReduceTasks(startKeys.size());<a name="line.669"></a>
-<span class="sourceLineNo">670</span><a name="line.670"></a>
-<span class="sourceLineNo">671</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    // Set compression algorithms based on column families<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.674"></a>
+<span class="sourceLineNo">417</span>        HFileContextBuilder contextBuilder = new HFileContextBuilder()<a name="line.417"></a>
+<span class="sourceLineNo">418</span>                                    .withCompression(compression)<a name="line.418"></a>
+<span class="sourceLineNo">419</span>                                    .withChecksumType(HStore.getChecksumType(conf))<a name="line.419"></a>
+<span class="sourceLineNo">420</span>                                    .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))<a name="line.420"></a>
+<span class="sourceLineNo">421</span>                                    .withBlockSize(blockSize);<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>        if (HFile.getFormatVersion(conf) &gt;= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>          contextBuilder.withIncludesTags(true);<a name="line.424"></a>
+<span class="sourceLineNo">425</span>        }<a name="line.425"></a>
+<span class="sourceLineNo">426</span><a name="line.426"></a>
+<span class="sourceLineNo">427</span>        contextBuilder.withDataBlockEncoding(encoding);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>        HFileContext hFileContext = contextBuilder.build();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>        if (null == favoredNodes) {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>          wl.writer =<a name="line.430"></a>
+<span class="sourceLineNo">431</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, fs)<a name="line.431"></a>
+<span class="sourceLineNo">432</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.432"></a>
+<span class="sourceLineNo">433</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext).build();<a name="line.433"></a>
+<span class="sourceLineNo">434</span>        } else {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>          wl.writer =<a name="line.435"></a>
+<span class="sourceLineNo">436</span>              new StoreFileWriter.Builder(conf, CacheConfig.DISABLED, new HFileSystem(fs))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>                  .withOutputDir(familydir).withBloomType(bloomType)<a name="line.437"></a>
+<span class="sourceLineNo">438</span>                  .withComparator(CellComparator.getInstance()).withFileContext(hFileContext)<a name="line.438"></a>
+<span class="sourceLineNo">439</span>                  .withFavoredNodes(favoredNodes).build();<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        }<a name="line.440"></a>
+<span class="sourceLineNo">441</span><a name="line.441"></a>
+<span class="sourceLineNo">442</span>        this.writers.put(tableAndFamily, wl);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        return wl;<a name="line.443"></a>
+<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
+<span class="sourceLineNo">445</span><a name="line.445"></a>
+<span class="sourceLineNo">446</span>      private void close(final StoreFileWriter w) throws IOException {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>        if (w != null) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>          w.appendFileInfo(BULKLOAD_TIME_KEY,<a name="line.448"></a>
+<span class="sourceLineNo">449</span>              Bytes.toBytes(System.currentTimeMillis()));<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          w.appendFileInfo(BULKLOAD_TASK_KEY,<a name="line.450"></a>
+<span class="sourceLineNo">451</span>              Bytes.toBytes(context.getTaskAttemptID().toString()));<a name="line.451"></a>
+<span class="sourceLineNo">452</span>          w.appendFileInfo(MAJOR_COMPACTION_KEY,<a name="line.452"></a>
+<span class="sourceLineNo">453</span>              Bytes.toBytes(true));<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          w.appendFileInfo(EXCLUDE_FROM_MINOR_COMPACTION_KEY,<a name="line.454"></a>
+<span class="sourceLineNo">455</span>              Bytes.toBytes(compactionExclude));<a name="line.455"></a>
+<span class="sourceLineNo">456</span>          w.appendTrackedTimestampsToMetadata();<a name="line.456"></a>
+<span class="sourceLineNo">457</span>          w.close();<a name="line.457"></a>
+<span class="sourceLineNo">458</span>        }<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      }<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>      @Override<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      public void close(TaskAttemptContext c)<a name="line.462"></a>
+<span class="sourceLineNo">463</span>      throws IOException, InterruptedException {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        for (WriterLength wl: this.writers.values()) {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>          close(wl.writer);<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        }<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    };<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * Configure block storage policy for CF after the directory is created.<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
+<span class="sourceLineNo">474</span>  static void configureStoragePolicy(final Configuration conf, final FileSystem fs,<a name="line.474"></a>
+<span class="sourceLineNo">475</span>      byte[] tableAndFamily, Path cfPath) {<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    if (null == conf || null == fs || null == tableAndFamily || null == cfPath) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      return;<a name="line.477"></a>
+<span class="sourceLineNo">478</span>    }<a name="line.478"></a>
+<span class="sourceLineNo">479</span><a name="line.479"></a>
+<span class="sourceLineNo">480</span>    String policy =<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        conf.get(STORAGE_POLICY_PROPERTY_CF_PREFIX + Bytes.toString(tableAndFamily),<a name="line.481"></a>
+<span class="sourceLineNo">482</span>          conf.get(STORAGE_POLICY_PROPERTY));<a name="line.482"></a>
+<span class="sourceLineNo">483</span>    FSUtils.setStoragePolicy(fs, cfPath, policy);<a name="line.483"></a>
+<span class="sourceLineNo">484</span>  }<a name="line.484"></a>
+<span class="sourceLineNo">485</span><a name="line.485"></a>
+<span class="sourceLineNo">486</span>  /*<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * Data structure to hold a Writer and amount of data written on it.<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   */<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  static class WriterLength {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    long written = 0;<a name="line.490"></a>
+<span class="sourceLineNo">491</span>    StoreFileWriter writer = null;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>  }<a name="line.492"></a>
+<span class="sourceLineNo">493</span><a name="line.493"></a>
+<span class="sourceLineNo">494</span>  /**<a name="line.494"></a>
+<span class="sourceLineNo">495</span>   * Return the start keys of all of the regions in this table,<a name="line.495"></a>
+<span class="sourceLineNo">496</span>   * as a list of ImmutableBytesWritable.<a name="line.496"></a>
+<span class="sourceLineNo">497</span>   */<a name="line.497"></a>
+<span class="sourceLineNo">498</span>  private static List&lt;ImmutableBytesWritable&gt; getRegionStartKeys(List&lt;RegionLocator&gt; regionLocators,<a name="line.498"></a>
+<span class="sourceLineNo">499</span>                                                                 boolean writeMultipleTables)<a name="line.499"></a>
+<span class="sourceLineNo">500</span>          throws IOException {<a name="line.500"></a>
+<span class="sourceLineNo">501</span><a name="line.501"></a>
+<span class="sourceLineNo">502</span>    ArrayList&lt;ImmutableBytesWritable&gt; ret = new ArrayList&lt;&gt;();<a name="line.502"></a>
+<span class="sourceLineNo">503</span>    for(RegionLocator regionLocator : regionLocators)<a name="line.503"></a>
+<span class="sourceLineNo">504</span>    {<a name="line.504"></a>
+<span class="sourceLineNo">505</span>      TableName tableName = regionLocator.getName();<a name="line.505"></a>
+<span class="sourceLineNo">506</span>      LOG.info("Looking up current regions for table " + tableName);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      byte[][] byteKeys = regionLocator.getStartKeys();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>      for (byte[] byteKey : byteKeys) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>        byte[] fullKey = byteKey; //HFileOutputFormat2 use case<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        if (writeMultipleTables)<a name="line.510"></a>
+<span class="sourceLineNo">511</span>        {<a name="line.511"></a>
+<span class="sourceLineNo">512</span>          //MultiTableHFileOutputFormat use case<a name="line.512"></a>
+<span class="sourceLineNo">513</span>          fullKey = combineTableNameSuffix(tableName.getName(), byteKey);<a name="line.513"></a>
+<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>        if (LOG.isDebugEnabled()) {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>          LOG.debug("SplitPoint startkey for table [" + tableName + "]: [" + Bytes.toStringBinary<a name="line.516"></a>
+<span class="sourceLineNo">517</span>                  (fullKey) + "]");<a name="line.517"></a>
+<span class="sourceLineNo">518</span>        }<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        ret.add(new ImmutableBytesWritable(fullKey));<a name="line.519"></a>
+<span class="sourceLineNo">520</span>      }<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    }<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    return ret;<a name="line.522"></a>
+<span class="sourceLineNo">523</span>  }<a name="line.523"></a>
+<span class="sourceLineNo">524</span><a name="line.524"></a>
+<span class="sourceLineNo">525</span>  /**<a name="line.525"></a>
+<span class="sourceLineNo">526</span>   * Write out a {@link SequenceFile} that can be read by<a name="line.526"></a>
+<span class="sourceLineNo">527</span>   * {@link TotalOrderPartitioner} that contains the split points in startKeys.<a name="line.527"></a>
+<span class="sourceLineNo">528</span>   */<a name="line.528"></a>
+<span class="sourceLineNo">529</span>  @SuppressWarnings("deprecation")<a name="line.529"></a>
+<span class="sourceLineNo">530</span>  private static void writePartitions(Configuration conf, Path partitionsPath,<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      List&lt;ImmutableBytesWritable&gt; startKeys, boolean writeMultipleTables) throws IOException {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    LOG.info("Writing partition information to " + partitionsPath);<a name="line.532"></a>
+<span class="sourceLineNo">533</span>    if (startKeys.isEmpty()) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>      throw new IllegalArgumentException("No regions passed");<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    }<a name="line.535"></a>
+<span class="sourceLineNo">536</span><a name="line.536"></a>
+<span class="sourceLineNo">537</span>    // We're generating a list of split points, and we don't ever<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // have keys &lt; the first region (which has an empty start key)<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    // so we need to remove it. Otherwise we would end up with an<a name="line.539"></a>
+<span class="sourceLineNo">540</span>    // empty reducer with index 0<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    TreeSet&lt;ImmutableBytesWritable&gt; sorted = new TreeSet&lt;&gt;(startKeys);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    ImmutableBytesWritable first = sorted.first();<a name="line.542"></a>
+<span class="sourceLineNo">543</span>    if (writeMultipleTables) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>      first = new ImmutableBytesWritable(MultiTableHFileOutputFormat.getSuffix(sorted.first<a name="line.544"></a>
+<span class="sourceLineNo">545</span>              ().get()));<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      throw new IllegalArgumentException(<a name="line.548"></a>
+<span class="sourceLineNo">549</span>          "First region of table should have empty start key. Instead has: "<a name="line.549"></a>
+<span class="sourceLineNo">550</span>          + Bytes.toStringBinary(first.get()));<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    }<a name="line.551"></a>
+<span class="sourceLineNo">552</span>    sorted.remove(sorted.first());<a name="line.552"></a>
+<span class="sourceLineNo">553</span><a name="line.553"></a>
+<span class="sourceLineNo">554</span>    // Write the actual file<a name="line.554"></a>
+<span class="sourceLineNo">555</span>    FileSystem fs = partitionsPath.getFileSystem(conf);<a name="line.555"></a>
+<span class="sourceLineNo">556</span>    SequenceFile.Writer writer = SequenceFile.createWriter(<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      fs, conf, partitionsPath, ImmutableBytesWritable.class,<a name="line.557"></a>
+<span class="sourceLineNo">558</span>      NullWritable.class);<a name="line.558"></a>
+<span class="sourceLineNo">559</span><a name="line.559"></a>
+<span class="sourceLineNo">560</span>    try {<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      for (ImmutableBytesWritable startKey : sorted) {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        writer.append(startKey, NullWritable.get());<a name="line.562"></a>
+<span class="sourceLineNo">563</span>      }<a name="line.563"></a>
+<span class="sourceLineNo">564</span>    } finally {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      writer.close();<a name="line.565"></a>
+<span class="sourceLineNo">566</span>    }<a name="line.566"></a>
+<span class="sourceLineNo">567</span>  }<a name="line.567"></a>
+<span class="sourceLineNo">568</span><a name="line.568"></a>
+<span class="sourceLineNo">569</span>  /**<a name="line.569"></a>
+<span class="sourceLineNo">570</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.570"></a>
+<span class="sourceLineNo">571</span>   * table. This<a name="line.571"></a>
+<span class="sourceLineNo">572</span>   * &lt;ul&gt;<a name="line.572"></a>
+<span class="sourceLineNo">573</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.573"></a>
+<span class="sourceLineNo">574</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.574"></a>
+<span class="sourceLineNo">575</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.575"></a>
+<span class="sourceLineNo">576</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.576"></a>
+<span class="sourceLineNo">577</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.577"></a>
+<span class="sourceLineNo">578</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.578"></a>
+<span class="sourceLineNo">579</span>   * &lt;/ul&gt;<a name="line.579"></a>
+<span class="sourceLineNo">580</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.580"></a>
+<span class="sourceLineNo">581</span>   * running this function.<a name="line.581"></a>
+<span class="sourceLineNo">582</span>   */<a name="line.582"></a>
+<span class="sourceLineNo">583</span>  public static void configureIncrementalLoad(Job job, Table table, RegionLocator regionLocator)<a name="line.583"></a>
+<span class="sourceLineNo">584</span>      throws IOException {<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    configureIncrementalLoad(job, table.getDescriptor(), regionLocator);<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  }<a name="line.586"></a>
+<span class="sourceLineNo">587</span><a name="line.587"></a>
+<span class="sourceLineNo">588</span>  /**<a name="line.588"></a>
+<span class="sourceLineNo">589</span>   * Configure a MapReduce Job to perform an incremental load into the given<a name="line.589"></a>
+<span class="sourceLineNo">590</span>   * table. This<a name="line.590"></a>
+<span class="sourceLineNo">591</span>   * &lt;ul&gt;<a name="line.591"></a>
+<span class="sourceLineNo">592</span>   *   &lt;li&gt;Inspects the table to configure a total order partitioner&lt;/li&gt;<a name="line.592"></a>
+<span class="sourceLineNo">593</span>   *   &lt;li&gt;Uploads the partitions file to the cluster and adds it to the DistributedCache&lt;/li&gt;<a name="line.593"></a>
+<span class="sourceLineNo">594</span>   *   &lt;li&gt;Sets the number of reduce tasks to match the current number of regions&lt;/li&gt;<a name="line.594"></a>
+<span class="sourceLineNo">595</span>   *   &lt;li&gt;Sets the output key/value class to match HFileOutputFormat2's requirements&lt;/li&gt;<a name="line.595"></a>
+<span class="sourceLineNo">596</span>   *   &lt;li&gt;Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or<a name="line.596"></a>
+<span class="sourceLineNo">597</span>   *     PutSortReducer)&lt;/li&gt;<a name="line.597"></a>
+<span class="sourceLineNo">598</span>   * &lt;/ul&gt;<a name="line.598"></a>
+<span class="sourceLineNo">599</span>   * The user should be sure to set the map output value class to either KeyValue or Put before<a name="line.599"></a>
+<span class="sourceLineNo">600</span>   * running this function.<a name="line.600"></a>
+<span class="sourceLineNo">601</span>   */<a name="line.601"></a>
+<span class="sourceLineNo">602</span>  public static void configureIncrementalLoad(Job job, TableDescriptor tableDescriptor,<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      RegionLocator regionLocator) throws IOException {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>    ArrayList&lt;TableInfo&gt; singleTableInfo = new ArrayList&lt;&gt;();<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    singleTableInfo.add(new TableInfo(tableDescriptor, regionLocator));<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    configureIncrementalLoad(job, singleTableInfo, HFileOutputFormat2.class);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>  }<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>  static void configureIncrementalLoad(Job job, List&lt;TableInfo&gt; multiTableInfo,<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      Class&lt;? extends OutputFormat&lt;?, ?&gt;&gt; cls) throws IOException {<a name="line.610"></a>
+<span class="sourceLineNo">611</span>    Configuration conf = job.getConfiguration();<a name="line.611"></a>
+<span class="sourceLineNo">612</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    job.setOutputFormatClass(cls);<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>    if (multiTableInfo.stream().distinct().count() != multiTableInfo.size()) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      throw new IllegalArgumentException("Duplicate entries found in TableInfo argument");<a name="line.617"></a>
+<span class="sourceLineNo">618</span>    }<a name="line.618"></a>
+<span class="sourceLineNo">619</span>    boolean writeMultipleTables = false;<a name="line.619"></a>
+<span class="sourceLineNo">620</span>    if (MultiTableHFileOutputFormat.class.equals(cls)) {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      writeMultipleTables = true;<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      conf.setBoolean(MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, true);<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>    // Based on the configured map output class, set the correct reducer to properly<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    // sort the incoming values.<a name="line.625"></a>
+<span class="sourceLineNo">626</span>    // TODO it would be nice to pick one or the other of these formats.<a name="line.626"></a>
+<span class="sourceLineNo">627</span>    if (KeyValue.class.equals(job.getMapOutputValueClass())<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        || MapReduceExtendedCell.class.equals(job.getMapOutputValueClass())) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      job.setReducerClass(CellSortReducer.class);<a name="line.629"></a>
+<span class="sourceLineNo">630</span>    } else if (Put.class.equals(job.getMapOutputValueClass())) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>      job.setReducerClass(PutSortReducer.class);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>    } else if (Text.class.equals(job.getMapOutputValueClass())) {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      job.setReducerClass(TextSortReducer.class);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    } else {<a name="line.634"></a>
+<span class="sourceLineNo">635</span>      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());<a name="line.635"></a>
+<span class="sourceLineNo">636</span>    }<a name="line.636"></a>
+<span class="sourceLineNo">637</span><a name="line.637"></a>
+<span class="sourceLineNo">638</span>    conf.setStrings("io.serializations", conf.get("io.serializations"),<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        MutationSerialization.class.getName(), ResultSerialization.class.getName(),<a name="line.639"></a>
+<span class="sourceLineNo">640</span>        CellSerialization.class.getName());<a name="line.640"></a>
+<span class="sourceLineNo">641</span><a name="line.641"></a>
+<span class="sourceLineNo">642</span>    if (conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {<a name="line.642"></a>
+<span class="sourceLineNo">643</span>      LOG.info("bulkload locality sensitive enabled");<a name="line.643"></a>
+<span class="sourceLineNo">644</span>    }<a name="line.644"></a>
+<span class="sourceLineNo">645</span><a name="line.645"></a>
+<span class="sourceLineNo">646</span>    /* Now get the region start keys for every table required */<a name="line.646"></a>
+<span class="sourceLineNo">647</span>    List&lt;String&gt; allTableNames = new ArrayList&lt;&gt;(multiTableInfo.size());<a name="line.647"></a>
+<span class="sourceLineNo">648</span>    List&lt;RegionLocator&gt; regionLocators = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.648"></a>
+<span class="sourceLineNo">649</span>    List&lt;TableDescriptor&gt; tableDescriptors = new ArrayList&lt;&gt;( multiTableInfo.size());<a name="line.649"></a>
+<span class="sourceLineNo">650</span><a name="line.650"></a>
+<span class="sourceLineNo">651</span>    for( TableInfo tableInfo : multiTableInfo )<a name="line.651"></a>
+<span class="sourceLineNo">652</span>    {<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      regionLocators.add(tableInfo.getRegionLocator());<a name="line.653"></a>
+<span class="sourceLineNo">654</span>      String tn = writeMultipleTables?<a name="line.654"></a>
+<span class="sourceLineNo">655</span>        tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():<a name="line.655"></a>
+<span class="sourceLineNo">656</span>        tableInfo.getRegionLocator().getName().getNameAsString();<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      allTableNames.add(tn);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      tableDescriptors.add(tableInfo.getTableDescriptor());<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>    // Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table<a name="line.660"></a>
+<span class="sourceLineNo">661</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, StringUtils.join(allTableNames, Bytes<a name="line.661"></a>
+<span class="sourceLineNo">662</span>            .toString(tableSeparator)));<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    List&lt;ImmutableBytesWritable&gt; startKeys = getRegionStartKeys(regionLocators, writeMultipleTables);<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    // Use table's region boundaries for TOP split points.<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +<a name="line.665"></a>
+<span class="sourceLineNo">666</span>        "to match current region count for all tables");<a name="line.666"></a>
+<span class="sourceLineNo">667</span>    job.setNumReduceTasks(startKeys.size());<a name="line.667"></a>
+<span class="sourceLineNo">668</span><a name="line.668"></a>
+<span class="sourceLineNo">669</span>    configurePartitioner(job, startKeys, writeMultipleTables);<a name="line.669"></a>
+<span class="sourceLineNo">670</span>    // Set compression algorithms based on column families<a name="line.670"></a>
+<span class="sourceLineNo">671</span><a name="line.671"></a>
+<span class="sourceLineNo">672</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(compressionDetails,<a name="line.672"></a>
+<span class="sourceLineNo">673</span>            tableDescriptors));<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.674"></a>
 <span class="sourceLineNo">675</span>            tableDescriptors));<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(blockSizeDetails,<a name="line.676"></a>
+<span class="sourceLineNo">676</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.676"></a>
 <span class="sourceLineNo">677</span>            tableDescriptors));<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomTypeDetails,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>            tableDescriptors));<a name="line.679"></a>
-<span class="sourceLineNo">680</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        tableDescriptors));<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.682"></a>
-<span class="sourceLineNo">683</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.683"></a>
-<span class="sourceLineNo">684</span><a name="line.684"></a>
-<span class="sourceLineNo">685</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    TableMapReduceUtil.initCredentials(job);<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.687"></a>
-<span class="sourceLineNo">688</span>  }<a name="line.688"></a>
-<span class="sourceLineNo">689</span><a name="line.689"></a>
-<span class="sourceLineNo">690</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      IOException {<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    Configuration conf = job.getConfiguration();<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.699"></a>
-<span class="sourceLineNo">700</span><a name="line.700"></a>
-<span class="sourceLineNo">701</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    // Set compression algorithms based on column families<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    conf.set(COMPRESSION_FAMILIES_CONF_KEY,<a name="line.703"></a>
-<span class="sourceLineNo">704</span>        serializeColumnFamilyAttribute(compressionDetails, singleTableDescriptor));<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    conf.set(BLOCK_SIZE_FAMILIES_CONF_KEY,<a name="line.705"></a>
-<span class="sourceLineNo">706</span>        serializeColumnFamilyAttribute(blockSizeDetails, singleTableDescriptor));<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    conf.set(BLOOM_TYPE_FAMILIES_CONF_KEY,<a name="line.707"></a>
-<span class="sourceLineNo">708</span>        serializeColumnFamilyAttribute(bloomTypeDetails, singleTableDescriptor));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY,<a name="line.709"></a>
-<span class="sourceLineNo">710</span>        serializeColumnFamilyAttribute(bloomParamDetails, singleTableDescriptor));<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.711"></a>
-<span class="sourceLineNo">712</span>        serializeColumnFamilyAttribute(dataBlockEncodingDetails, singleTableDescriptor));<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    TableMapReduceUtil.initCredentials(job);<a name="line.715"></a>
-<span class="sourceLineNo">716</span>    LOG.info("Incremental table " + tableDescriptor.getTableName() + " output configured.");<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  }<a name="line.717"></a>
-<span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  /**<a name="line.719"></a>
-<span class="sourceLineNo">720</span>   * Runs inside the task to deserialize column family to compression algorithm<a name="line.720"></a>
-<span class="sourceLineNo">721</span>   * map from the configuration.<a name="line.721"></a>
-<span class="sourceLineNo">722</span>   *<a name="line.722"></a>
-<span class="sourceLineNo">723</span>   * @param conf to read the serialized values from<a name="line.723"></a>
-<span class="sourceLineNo">724</span>   * @return a map from column family to the configured compression algorithm<a name="line.724"></a>
-<span class="sourceLineNo">725</span>   */<a name="line.725"></a>
-<span class="sourceLineNo">726</span>  @VisibleForTesting<a name="line.726"></a>
-<span class="sourceLineNo">727</span>  static Map&lt;byte[], Algorithm&gt; createFamilyCompressionMap(Configuration<a name="line.727"></a>
-<span class="sourceLineNo">728</span>      conf) {<a name="line.728"></a>
-<span class="sourceLineNo">729</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.729"></a>
-<span class="sourceLineNo">730</span>        COMPRESSION_FAMILIES_CONF_KEY);<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    Map&lt;byte[], Algorithm&gt; compressionMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.731"></a>
-<span class="sourceLineNo">732</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      Algorithm algorithm = HFileWriterImpl.compressionByName(e.getValue());<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      compressionMap.put(e.getKey(), algorithm);<a name="line.734"></a>
-<span class="sourceLineNo">735</span>    }<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return compressionMap;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  /**<a name="line.739"></a>
-<span class="sourceLineNo">740</span>   * Runs inside the task to deserialize column family to bloom filter type<a name="line.740"></a>
-<span class="sourceLineNo">741</span>   * map from the configuration.<a name="line.741"></a>
-<span class="sourceLineNo">742</span>   *<a name="line.742"></a>
-<span class="sourceLineNo">743</span>   * @param conf to read the serialized values from<a name="line.743"></a>
-<span class="sourceLineNo">744</span>   * @return a map from column family to the the configured bloom filter type<a name="line.744"></a>
-<span class="sourceLineNo">745</span>   */<a name="line.745"></a>
-<span class="sourceLineNo">746</span>  @VisibleForTesting<a name="line.746"></a>
-<span class="sourceLineNo">747</span>  static Map&lt;byte[], BloomType&gt; createFamilyBloomTypeMap(Configuration conf) {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        BLOOM_TYPE_FAMILIES_CONF_KEY);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>    Map&lt;byte[], BloomType&gt; bloomTypeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      BloomType bloomType = BloomType.valueOf(e.getValue());<a name="line.752"></a>
-<span class="sourceLineNo">753</span>      bloomTypeMap.put(e.getKey(), bloomType);<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    }<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    return bloomTypeMap;<a name="line.755"></a>
-<span class="sourceLineNo">756</span>  }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>  /**<a name="line.758"></a>
-<span class="sourceLineNo">759</span>   * Runs inside the task to deserialize column family to bloom filter param<a name="line.759"></a>
-<span class="sourceLineNo">760</span>   * map from the configuration.<a name="line.760"></a>
-<span class="sourceLineNo">761</span>   *<a name="line.761"></a>
-<span class="sourceLineNo">762</span>   * @param conf to read the serialized values from<a name="line.762"></a>
-<span class="sourceLineNo">763</span>   * @return a map from column family to the the configured bloom filter param<a name="line.763"></a>
-<span class="sourceLineNo">764</span>   */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>  @VisibleForTesting<a name="line.765"></a>
-<span class="sourceLineNo">766</span>  static Map&lt;byte[], String&gt; createFamilyBloomParamMap(Configuration conf) {<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    return createFamilyConfValueMap(conf, BLOOM_PARAM_FAMILIES_CONF_KEY);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>  }<a name="line.768"></a>
-<span class="sourceLineNo">769</span><a name="line.769"></a>
-<span class="sourceLineNo">770</span><a name="line.770"></a>
-<span class="sourceLineNo">771</span>  /**<a name="line.771"></a>
-<span class="sourceLineNo">772</span>   * Runs inside the task to deserialize column family to block size<a name="line.772"></a>
-<span class="sourceLineNo">773</span>   * map from the configuration.<a name="line.773"></a>
-<span class="sourceLineNo">774</span>   *<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * @param conf to read the serialized values from<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * @return a map from column family to the configured block size<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   */<a name="line.777"></a>
-<span class="sourceLineNo">778</span>  @VisibleForTesting<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  static Map&lt;byte[], Integer&gt; createFamilyBlockSizeMap(Configuration conf) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.780"></a>
-<span class="sourceLineNo">781</span>        BLOCK_SIZE_FAMILIES_CONF_KEY);<a name="line.781"></a>
-<span class="sourceLineNo">782</span>    Map&lt;byte[], Integer&gt; blockSizeMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.783"></a>
-<span class="sourceLineNo">784</span>      Integer blockSize = Integer.parseInt(e.getValue());<a name="line.784"></a>
-<span class="sourceLineNo">785</span>      blockSizeMap.put(e.getKey(), blockSize);<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    return blockSizeMap;<a name="line.787"></a>
-<span class="sourceLineNo">788</span>  }<a name="line.788"></a>
-<span class="sourceLineNo">789</span><a name="line.789"></a>
-<span class="sourceLineNo">790</span>  /**<a name="line.790"></a>
-<span class="sourceLineNo">791</span>   * Runs inside the task to deserialize column family to data block encoding<a name="line.791"></a>
-<span class="sourceLineNo">792</span>   * type map from the configuration.<a name="line.792"></a>
-<span class="sourceLineNo">793</span>   *<a name="line.793"></a>
-<span class="sourceLineNo">794</span>   * @param conf to read the serialized values from<a name="line.794"></a>
-<span class="sourceLineNo">795</span>   * @return a map from column family to HFileDataBlockEncoder for the<a name="line.795"></a>
-<span class="sourceLineNo">796</span>   *         configured data block type for the family<a name="line.796"></a>
-<span class="sourceLineNo">797</span>   */<a name="line.797"></a>
-<span class="sourceLineNo">798</span>  @VisibleForTesting<a name="line.798"></a>
-<span class="sourceLineNo">799</span>  static Map&lt;byte[], DataBlockEncoding&gt; createFamilyDataBlockEncodingMap(<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      Configuration conf) {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    Map&lt;byte[], String&gt; stringMap = createFamilyConfValueMap(conf,<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        DATABLOCK_ENCODING_FAMILIES_CONF_KEY);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    Map&lt;byte[], DataBlockEncoding&gt; encoderMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    for (Map.Entry&lt;byte[], String&gt; e : stringMap.entrySet()) {<a name="line.804"></a>
-<span class="sourceLineNo">805</span>      encoderMap.put(e.getKey(), DataBlockEncoding.valueOf((e.getValue())));<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    }<a name="line.806"></a>
-<span class="sourceLineNo">807</span>    return encoderMap;<a name="line.807"></a>
-<span class="sourceLineNo">808</span>  }<a name="line.808"></a>
-<span class="sourceLineNo">809</span><a name="line.809"></a>
-<span class="sourceLineNo">810</span><a name="line.810"></a>
-<span class="sourceLineNo">811</span>  /**<a name="line.811"></a>
-<span class="sourceLineNo">812</span>   * Run inside the task to deserialize column family to given conf value map.<a name="line.812"></a>
-<span class="sourceLineNo">813</span>   *<a name="line.813"></a>
-<span class="sourceLineNo">814</span>   * @param conf to read the serialized values from<a name="line.814"></a>
-<span class="sourceLineNo">815</span>   * @param confName conf key to read from the configuration<a name="line.815"></a>
-<span class="sourceLineNo">816</span>   * @return a map of column family to the given configuration value<a name="line.816"></a>
-<span class="sourceLineNo">817</span>   */<a name="line.817"></a>
-<span class="sourceLineNo">818</span>  private static Map&lt;byte[], String&gt; createFamilyConfValueMap(<a name="line.818"></a>
-<span class="sourceLineNo">819</span>      Configuration conf, String confName) {<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    Map&lt;byte[], String&gt; confValMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    String confVal = conf.get(confName, "");<a name="line.821"></a>
-<span class="sourceLineNo">822</span>    for (String familyConf : confVal.split("&amp;")) {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      String[] familySplit = familyConf.split("=");<a name="line.823"></a>
-<span class="sourceLineNo">824</span>      if (familySplit.length != 2) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>        continue;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>      }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>      try {<a name="line.827"></a>
-<span class="sourceLineNo">828</span>        confValMap.put(Bytes.toBytes(URLDecoder.decode(familySplit[0], "UTF-8")),<a name="line.828"></a>
-<span class="sourceLineNo">829</span>            URLDecoder.decode(familySplit[1], "UTF-8"));<a name="line.829"></a>
-<span class="sourceLineNo">830</span>      } catch (UnsupportedEncodingException e) {<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        // will not happen with UTF-8 encoding<a name="line.831"></a>
-<span class="sourceLineNo">832</span>        throw new AssertionError(e);<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>    }<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    return confValMap;<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * Configure &lt;code&gt;job&lt;/code&gt; with a TotalOrderPartitioner, partitioning against<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * &lt;code&gt;splitPoints&lt;/code&gt;. Cleans up the partitions file after job exists.<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   */<a name="line.841"></a>
-<span class="sourceLineNo">842</span>  static void configurePartitioner(Job job, List&lt;ImmutableBytesWritable&gt; splitPoints, boolean<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          writeMultipleTables)<a name="line.843"></a>
-<span class="sourceLineNo">844</span>      throws IOException {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>    Configuration conf = job.getConfiguration();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>    // create the partitions file<a name="line.846"></a>
-<span class="sourceLineNo">847</span>    FileSystem fs = FileSystem.get(conf);<a name="line.847"></a>
-<span class="sourceLineNo">848</span>    String hbaseTmpFsDir =<a name="line.848"></a>
-<span class="sourceLineNo">849</span>        conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,<a name="line.849"></a>
-<span class="sourceLineNo">850</span>            fs.getHomeDirectory() + "/hbase-staging");<a name="line.850"></a>
-<span class="sourceLineNo">851</span>    Path partitionsPath = new Path(hbaseTmpFsDir, "partitions_" + UUID.randomUUID());<a name="line.851"></a>
-<span class="sourceLineNo">852</span>    fs.makeQualified(partitionsPath);<a name="line.852"></a>
-<span class="sourceLineNo">853</span>    writePartitions(conf, partitionsPath, splitPoints, writeMultipleTables);<a name="line.853"></a>
-<span class="sourceLineNo">854</span>    fs.deleteOnExit(partitionsPath);<a name="line.854"></a>
-<span class="sourceLineNo">855</span><a name="line.855"></a>
-<span class="sourceLineNo">856</span>    // configure job to use it<a name="line.856"></a>
-<span class="sourceLineNo">857</span>    job.setPartitionerClass(TotalOrderPartitioner.class);<a name="line.857"></a>
-<span class="sourceLineNo">858</span>    TotalOrderPartitioner.setPartitionFile(conf, partitionsPath);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
-<span class="sourceLineNo">860</span><a name="line.860"></a>
-<span class="sourceLineNo">861</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")<a name="line.861"></a>
-<span class="sourceLineNo">862</span>  @VisibleForTesting<a name="line.862"></a>
-<span class="sourceLineNo">863</span>  static String serializeColumnFamilyAttribute(Function&lt;ColumnFamilyDescriptor, String&gt; fn, List&lt;TableDescriptor&gt; allTables)<a name="line.863"></a>
-<span class="sourceLineNo">864</span>      throws UnsupportedEncodingException {<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    StringBuilder attributeValue = new StringBuilder();<a name="line.865"></a>
-<span class="sourceLineNo">866</span>    int i = 0;<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    for (TableDescriptor tableDescriptor : allTables) {<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      if (tableDescriptor == null) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>        // could happen with mock table instance<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        // CODEREVIEW: Can I set an empty string in conf if mock table instance?<a name="line.870"></a>
-<span class="sourceLineNo">871</span>        return "";<a name="line.871"></a>
-<span class="sourceLineNo">872</span>      }<a name="line.872"></a>
-<span class="sourceLineNo">873</span>      for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>        if (i++ &gt; 0) {<a name="line.874"></a>
-<span class="sourceLineNo">875</span>          attributeValue.append('&amp;');<a name="line.875"></a>
-<span class="sourceLineNo">876</span>        }<a name="line.876"></a>
-<span class="sourceLineNo">877</span>        attributeValue.append(URLEncoder.encode(<a name="line.877"></a>
-<span class="sourceLineNo">878</span>            Bytes.toString(combineTableNameSuffix(tableDescriptor.getTableName().getName(), familyDescriptor.getName())),<a name="line.878"></a>
-<span class="sourceLineNo">879</span>            "UTF-8"));<a name="line.879"></a>
-<span class="sourceLineNo">880</span>        attributeValue.append('=');<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        attributeValue.append(URLEncoder.encode(fn.apply(familyDescriptor), "UTF-8"));<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      }<a name="line.882"></a>
-<span class="sourceLineNo">883</span>    }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>    // Get rid of the last ampersand<a name="line.884"></a>
-<span class="sourceLineNo">885</span>    return attributeValue.toString();<a name="line.885"></a>
-<span class="sourceLineNo">886</span>  }<a name="line.886"></a>
-<span class="sourceLineNo">887</span><a name="line.887"></a>
-<span class="sourceLineNo">888</span>  /**<a name="line.888"></a>
-<span class="sourceLineNo">889</span>   * Serialize column family to compression algorithm map to configuration.<a name="line.889"></a>
-<span class="sourceLineNo">890</span>   * Invoked while configuring the MR job for incremental load.<a name="line.890"></a>
-<span class="sourceLineNo">891</span>   *<a name="line.891"></a>
-<span class="sourceLineNo">892</span>   * @param tableDescriptor to read the properties from<a name="line.892"></a>
-<span class="sourceLineNo">893</span>   * @param conf to persist serialized values into<a name="line.893"></a>
-<span class="sourceLineNo">894</span>   * @throws IOException<a name="line.894"></a>
-<span class="sourceLineNo">895</span>   *           on failure to read column family descriptors<a name="line.895"></a>
-<span class="sourceLineNo">896</span>   */<a name="line.896"></a>
-<span class="sourceLineNo">897</span>  @VisibleForTesting<a name="line.897"></a>
-<span class="sourceLineNo">898</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; compressionDetails = familyDescriptor -&gt;<a name="line.898"></a>
-<span class="sourceLineNo">899</span>          familyDescriptor.getCompressionType().getName();<a name="line.899"></a>
-<span class="sourceLineNo">900</span><a name="line.900"></a>
-<span class="sourceLineNo">901</span>  /**<a name="line.901"></a>
-<span class="sourceLineNo">902</span>   * Serialize column family to block size map to configuration. Invoked while<a name="line.902"></a>
-<span class="sourceLineNo">903</span>   * configuring the MR job for incremental load.<a name="line.903"></a>
-<span class="sourceLineNo">904</span>   *<a name="line.904"></a>
-<span class="sourceLineNo">905</span>   * @param tableDescriptor<a name="line.905"></a>
-<span class="sourceLineNo">906</span>   *          to read the properties from<a name="line.906"></a>
-<span class="sourceLineNo">907</span>   * @param conf<a name="line.907"></a>
-<span class="sourceLineNo">908</span>   *          to persist serialized values into<a name="line.908"></a>
-<span class="sourceLineNo">909</span>   *<a name="line.909"></a>
-<span class="sourceLineNo">910</span>   * @throws IOException<a name="line.910"></a>
-<span class="sourceLineNo">911</span>   *           on failure to read column family descriptors<a name="line.911"></a>
-<span class="sourceLineNo">912</span>   */<a name="line.912"></a>
-<span class="sourceLineNo">913</span>  @VisibleForTesting<a name="line.913"></a>
-<span class="sourceLineNo">914</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; blockSizeDetails = familyDescriptor -&gt; String<a name="line.914"></a>
-<span class="sourceLineNo">915</span>          .valueOf(familyDescriptor.getBlocksize());<a name="line.915"></a>
-<span class="sourceLineNo">916</span><a name="line.916"></a>
-<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   * Serialize column family to bloom type map to configuration. Invoked while<a name="line.918"></a>
-<span class="sourceLineNo">919</span>   * configuring the MR job for incremental load.<a name="line.919"></a>
-<span class="sourceLineNo">920</span>   *<a name="line.920"></a>
-<span class="sourceLineNo">921</span>   * @param tableDescriptor<a name="line.921"></a>
-<span class="sourceLineNo">922</span>   *          to read the properties from<a name="line.922"></a>
-<span class="sourceLineNo">923</span>   * @param conf<a name="line.923"></a>
-<span class="sourceLineNo">924</span>   *          to persist serialized values into<a name="line.924"></a>
-<span class="sourceLineNo">925</span>   *<a name="line.925"></a>
-<span class="sourceLineNo">926</span>   * @throws IOException<a name="line.926"></a>
-<span class="sourceLineNo">927</span>   *           on failure to read column family descriptors<a name="line.927"></a>
-<span class="sourceLineNo">928</span>   */<a name="line.928"></a>
-<span class="sourceLineNo">929</span>  @VisibleForTesting<a name="line.929"></a>
-<span class="sourceLineNo">930</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomTypeDetails = familyDescriptor -&gt; {<a name="line.930"></a>
-<span class="sourceLineNo">931</span>    String bloomType = familyDescriptor.getBloomFilterType().toString();<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    if (bloomType == null) {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>      bloomType = ColumnFamilyDescriptorBuilder.DEFAULT_BLOOMFILTER.name();<a name="line.933"></a>
-<span class="sourceLineNo">934</span>    }<a name="line.934"></a>
-<span class="sourceLineNo">935</span>    return bloomType;<a name="line.935"></a>
-<span class="sourceLineNo">936</span>  };<a name="line.936"></a>
-<span class="sourceLineNo">937</span><a name="line.937"></a>
-<span class="sourceLineNo">938</span>  /**<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   * Serialize column family to bloom param map to configuration. Invoked while<a name="line.939"></a>
-<span class="sourceLineNo">940</span>   * configuring the MR job for incremental load.<a name="line.940"></a>
-<span class="sourceLineNo">941</span>   *<a name="line.941"></a>
-<span class="sourceLineNo">942</span>   * @param tableDescriptor<a name="line.942"></a>
-<span class="sourceLineNo">943</span>   *          to read the properties from<a name="line.943"></a>
-<span class="sourceLineNo">944</span>   * @param conf<a name="line.944"></a>
-<span class="sourceLineNo">945</span>   *          to persist serialized values into<a name="line.945"></a>
-<span class="sourceLineNo">946</span>   *<a name="line.946"></a>
-<span class="sourceLineNo">947</span>   * @throws IOException<a name="line.947"></a>
-<span class="sourceLineNo">948</span>   *           on failure to read column family descriptors<a name="line.948"></a>
-<span class="sourceLineNo">949</span>   */<a name="line.949"></a>
-<span class="sourceLineNo">950</span>  @VisibleForTesting<a name="line.950"></a>
-<span class="sourceLineNo">951</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; bloomParamDetails = familyDescriptor -&gt; {<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    BloomType bloomType = familyDescriptor.getBloomFilterType();<a name="line.952"></a>
-<span class="sourceLineNo">953</span>    String bloomParam = "";<a name="line.953"></a>
-<span class="sourceLineNo">954</span>    if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.PREFIX_LENGTH_KEY);<a name="line.955"></a>
-<span class="sourceLineNo">956</span>    } else if (bloomType == BloomType.ROWPREFIX_DELIMITED) {<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.DELIMITER_KEY);<a name="line.957"></a>
-<span class="sourceLineNo">958</span>    }<a name="line.958"></a>
-<span class="sourceLineNo">959</span>    return bloomParam;<a name="line.959"></a>
-<span class="sourceLineNo">960</span>  };<a name="line.960"></a>
-<span class="sourceLineNo">961</span><a name="line.961"></a>
-<span class="sourceLineNo">962</span>  /**<a name="line.962"></a>
-<span class="sourceLineNo">963</span>   * Serialize column family to data block encoding map to configuration.<a name="line.963"></a>
-<span class="sourceLineNo">964</span>   * Invoked while configuring the MR job for incremental load.<a name="line.964"></a>
-<span class="sourceLineNo">965</span>   *<a name="line.965"></a>
-<span class="sourceLineNo">966</span>   * @param tableDescriptor<a name="line.966"></a>
-<span class="sourceLineNo">967</span>   *          to read the properties from<a name="line.967"></a>
-<span class="sourceLineNo">968</span>   * @param conf<a name="line.968"></a>
-<span class="sourceLineNo">969</span>   *          to persist serialized values into<a name="line.969"></a>
-<span class="sourceLineNo">970</span>   * @throws IOException<a name="line.970"></a>
-<span class="sourceLineNo">971</span>   *           on failure to read column family descriptors<a name="line.971"></a>
-<span class="sourceLineNo">972</span>   */<a name="line.972"></a>
-<span class="sourceLineNo">973</span>  @VisibleForTesting<a name="line.973"></a>
-<span class="sourceLineNo">974</span>  static Function&lt;ColumnFamilyDescriptor, String&gt; dataBlockEncodingDetails = familyDescriptor -&gt; {<a name="line.974"></a>
-<span class="sourceLineNo">975</span>    DataBlockEncoding encoding = familyDescriptor.getDataBlockEncoding();<a name="line.975"></a>
-<span class="sourceLineNo">976</span>    if (encoding == null) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>      encoding = DataBlockEncoding.NONE;<a name="line.977"></a>
-<span class="sourceLineNo">978</span>    }<a name="line.978"></a>
-<span class="sourceLineNo">979</span>    return encoding.toString();<a name="line.979"></a>
-<span class="sourceLineNo">980</span>  };<a name="line.980"></a>
-<span class="sourceLineNo">981</span><a name="line.981"></a>
-<span class="sourceLineNo">982</span>}<a name="line.982"></a>
+<span class="sourceLineNo">678</span>    conf.set(BLOOM_PARAM_FAMILIES_CONF_KEY, serializeColumnFamilyAttribute(bloomParamDetails,<a name="line.678"></a>
+<span class="sourceLineNo">679</span>        tableDescriptors));<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    conf.set(DATABLOCK_ENCODING_FAMILIES_CONF_KEY,<a name="line.680"></a>
+<span class="sourceLineNo">681</span>            serializeColumnFamilyAttribute(dataBlockEncodingDetails, tableDescriptors));<a name="line.681"></a>
+<span class="sourceLineNo">682</span><a name="line.682"></a>
+<span class="sourceLineNo">683</span>    TableMapReduceUtil.addDependencyJars(job);<a name="line.683"></a>
+<span class="sourceLineNo">684</span>    TableMapReduceUtil.initCredentials(job);<a name="line.684"></a>
+<span class="sourceLineNo">685</span>    LOG.info("Incremental output configured for tables: " + StringUtils.join(allTableNames, ","));<a name="line.685"></a>
+<span class="sourceLineNo">686</span>  }<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>  public static void configureIncrementalLoadMap(Job job, TableDescriptor tableDescriptor) throws<a name="line.688"></a>
+<span class="sourceLineNo">689</span>      IOException {<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    Configuration conf = job.getConfiguration();<a name="line.690"></a>
+<span class="sourceLineNo">691</span><a name="line.691"></a>
+<span class="sourceLineNo">692</span>    job.setOutputKeyClass(ImmutableBytesWritable.class);<a name="line.692"></a>
+<span class="sourceLineNo">693</span>    job.setOutputValueClass(MapReduceExtendedCell.class);<a name="line.693"></a>
+<span class="sourceLineNo">694</span>    job.setOutputFormatClass(HFileOutputFormat2.class);<a name="line.694"></a>
+<span class="sourceLineNo">695</span><a name="line.695"></a>
+<span class="sourceLineNo">696</span>    ArrayList&lt;TableDescriptor&gt; singleTableDescriptor = new ArrayList&lt;&gt;(1);<a name="line.696"></a>
+<span class="sourceLineNo">697</span>    singleTableDescriptor.add(tableDescriptor);<a name="line.697"></a>
+<span class="sourceLineNo">698</span><a name="line.698"></a>
+<span class="sourceLineNo">699</span>    conf.set(OUTPUT_TABLE_NAME_CONF_KEY, tableDescriptor.getTableName().getNameAsString());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    // Set compression algorithms based on column families<a name="line.700"></a>
+<span class="sourceLineNo">701</span>    conf.set(COMP

<TRUNCATED>

[11/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionBoundariesInformation.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourc

<TRUNCATED>

[02/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="line.1543"></a>
-<span class="sourceLineNo">1544</span>    Configuratio

<TRUNCATED>

[17/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HBaseFsckTool.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="l

<TRUNCATED>

[22/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.BulkHFileVisitor.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.BulkHFileVisitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.BulkHFileVisitor.html
index 46ffa67..c192dce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.BulkHFileVisitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.BulkHFileVisitor.html
@@ -718,7 +718,7 @@
 <span class="sourceLineNo">710</span>    Path hfilePath = item.getFilePath();<a name="line.710"></a>
 <span class="sourceLineNo">711</span>    Optional&lt;byte[]&gt; first, last;<a name="line.711"></a>
 <span class="sourceLineNo">712</span>    try (HFile.Reader hfr = HFile.createReader(hfilePath.getFileSystem(getConf()), hfilePath,<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      new CacheConfig(getConf()), true, getConf())) {<a name="line.713"></a>
+<span class="sourceLineNo">713</span>      CacheConfig.DISABLED, true, getConf())) {<a name="line.713"></a>
 <span class="sourceLineNo">714</span>      hfr.loadFileInfo();<a name="line.714"></a>
 <span class="sourceLineNo">715</span>      first = hfr.getFirstRowKey();<a name="line.715"></a>
 <span class="sourceLineNo">716</span>      last = hfr.getLastRowKey();<a name="line.716"></a>
@@ -855,7 +855,7 @@
 <span class="sourceLineNo">847</span>          throws IOException {<a name="line.847"></a>
 <span class="sourceLineNo">848</span>        Path hfile = hfileStatus.getPath();<a name="line.848"></a>
 <span class="sourceLineNo">849</span>        try (HFile.Reader reader =<a name="line.849"></a>
-<span class="sourceLineNo">850</span>            HFile.createReader(fs, hfile, new CacheConfig(getConf()), true, getConf())) {<a name="line.850"></a>
+<span class="sourceLineNo">850</span>            HFile.createReader(fs, hfile, CacheConfig.DISABLED, true, getConf())) {<a name="line.850"></a>
 <span class="sourceLineNo">851</span>          if (builder.getCompressionType() != reader.getFileContext().getCompression()) {<a name="line.851"></a>
 <span class="sourceLineNo">852</span>            builder.setCompressionType(reader.getFileContext().getCompression());<a name="line.852"></a>
 <span class="sourceLineNo">853</span>            LOG.info("Setting compression " + reader.getFileContext().getCompression().name() +<a name="line.853"></a>
@@ -1091,7 +1091,7 @@
 <span class="sourceLineNo">1083</span>  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,<a name="line.1083"></a>
 <span class="sourceLineNo">1084</span>      Reference reference, ColumnFamilyDescriptor familyDescriptor) throws IOException {<a name="line.1084"></a>
 <span class="sourceLineNo">1085</span>    FileSystem fs = inFile.getFileSystem(conf);<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.1086"></a>
+<span class="sourceLineNo">1086</span>    CacheConfig cacheConf = CacheConfig.DISABLED;<a name="line.1086"></a>
 <span class="sourceLineNo">1087</span>    HalfStoreFileReader halfReader = null;<a name="line.1087"></a>
 <span class="sourceLineNo">1088</span>    StoreFileWriter halfWriter = null;<a name="line.1088"></a>
 <span class="sourceLineNo">1089</span>    try {<a name="line.1089"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
index 46ffa67..c192dce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.LoadQueueItem.html
@@ -718,7 +718,7 @@
 <span class="sourceLineNo">710</span>    Path hfilePath = item.getFilePath();<a name="line.710"></a>
 <span class="sourceLineNo">711</span>    Optional&lt;byte[]&gt; first, last;<a name="line.711"></a>
 <span class="sourceLineNo">712</span>    try (HFile.Reader hfr = HFile.createReader(hfilePath.getFileSystem(getConf()), hfilePath,<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      new CacheConfig(getConf()), true, getConf())) {<a name="line.713"></a>
+<span class="sourceLineNo">713</span>      CacheConfig.DISABLED, true, getConf())) {<a name="line.713"></a>
 <span class="sourceLineNo">714</span>      hfr.loadFileInfo();<a name="line.714"></a>
 <span class="sourceLineNo">715</span>      first = hfr.getFirstRowKey();<a name="line.715"></a>
 <span class="sourceLineNo">716</span>      last = hfr.getLastRowKey();<a name="line.716"></a>
@@ -855,7 +855,7 @@
 <span class="sourceLineNo">847</span>          throws IOException {<a name="line.847"></a>
 <span class="sourceLineNo">848</span>        Path hfile = hfileStatus.getPath();<a name="line.848"></a>
 <span class="sourceLineNo">849</span>        try (HFile.Reader reader =<a name="line.849"></a>
-<span class="sourceLineNo">850</span>            HFile.createReader(fs, hfile, new CacheConfig(getConf()), true, getConf())) {<a name="line.850"></a>
+<span class="sourceLineNo">850</span>            HFile.createReader(fs, hfile, CacheConfig.DISABLED, true, getConf())) {<a name="line.850"></a>
 <span class="sourceLineNo">851</span>          if (builder.getCompressionType() != reader.getFileContext().getCompression()) {<a name="line.851"></a>
 <span class="sourceLineNo">852</span>            builder.setCompressionType(reader.getFileContext().getCompression());<a name="line.852"></a>
 <span class="sourceLineNo">853</span>            LOG.info("Setting compression " + reader.getFileContext().getCompression().name() +<a name="line.853"></a>
@@ -1091,7 +1091,7 @@
 <span class="sourceLineNo">1083</span>  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,<a name="line.1083"></a>
 <span class="sourceLineNo">1084</span>      Reference reference, ColumnFamilyDescriptor familyDescriptor) throws IOException {<a name="line.1084"></a>
 <span class="sourceLineNo">1085</span>    FileSystem fs = inFile.getFileSystem(conf);<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.1086"></a>
+<span class="sourceLineNo">1086</span>    CacheConfig cacheConf = CacheConfig.DISABLED;<a name="line.1086"></a>
 <span class="sourceLineNo">1087</span>    HalfStoreFileReader halfReader = null;<a name="line.1087"></a>
 <span class="sourceLineNo">1088</span>    StoreFileWriter halfWriter = null;<a name="line.1088"></a>
 <span class="sourceLineNo">1089</span>    try {<a name="line.1089"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.html b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.html
index 46ffa67..c192dce 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/LoadIncrementalHFiles.html
@@ -718,7 +718,7 @@
 <span class="sourceLineNo">710</span>    Path hfilePath = item.getFilePath();<a name="line.710"></a>
 <span class="sourceLineNo">711</span>    Optional&lt;byte[]&gt; first, last;<a name="line.711"></a>
 <span class="sourceLineNo">712</span>    try (HFile.Reader hfr = HFile.createReader(hfilePath.getFileSystem(getConf()), hfilePath,<a name="line.712"></a>
-<span class="sourceLineNo">713</span>      new CacheConfig(getConf()), true, getConf())) {<a name="line.713"></a>
+<span class="sourceLineNo">713</span>      CacheConfig.DISABLED, true, getConf())) {<a name="line.713"></a>
 <span class="sourceLineNo">714</span>      hfr.loadFileInfo();<a name="line.714"></a>
 <span class="sourceLineNo">715</span>      first = hfr.getFirstRowKey();<a name="line.715"></a>
 <span class="sourceLineNo">716</span>      last = hfr.getLastRowKey();<a name="line.716"></a>
@@ -855,7 +855,7 @@
 <span class="sourceLineNo">847</span>          throws IOException {<a name="line.847"></a>
 <span class="sourceLineNo">848</span>        Path hfile = hfileStatus.getPath();<a name="line.848"></a>
 <span class="sourceLineNo">849</span>        try (HFile.Reader reader =<a name="line.849"></a>
-<span class="sourceLineNo">850</span>            HFile.createReader(fs, hfile, new CacheConfig(getConf()), true, getConf())) {<a name="line.850"></a>
+<span class="sourceLineNo">850</span>            HFile.createReader(fs, hfile, CacheConfig.DISABLED, true, getConf())) {<a name="line.850"></a>
 <span class="sourceLineNo">851</span>          if (builder.getCompressionType() != reader.getFileContext().getCompression()) {<a name="line.851"></a>
 <span class="sourceLineNo">852</span>            builder.setCompressionType(reader.getFileContext().getCompression());<a name="line.852"></a>
 <span class="sourceLineNo">853</span>            LOG.info("Setting compression " + reader.getFileContext().getCompression().name() +<a name="line.853"></a>
@@ -1091,7 +1091,7 @@
 <span class="sourceLineNo">1083</span>  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,<a name="line.1083"></a>
 <span class="sourceLineNo">1084</span>      Reference reference, ColumnFamilyDescriptor familyDescriptor) throws IOException {<a name="line.1084"></a>
 <span class="sourceLineNo">1085</span>    FileSystem fs = inFile.getFileSystem(conf);<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.1086"></a>
+<span class="sourceLineNo">1086</span>    CacheConfig cacheConf = CacheConfig.DISABLED;<a name="line.1086"></a>
 <span class="sourceLineNo">1087</span>    HalfStoreFileReader halfReader = null;<a name="line.1087"></a>
 <span class="sourceLineNo">1088</span>    StoreFileWriter halfWriter = null;<a name="line.1088"></a>
 <span class="sourceLineNo">1089</span>    try {<a name="line.1089"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/CompressionTest.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/CompressionTest.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/CompressionTest.html
index f2b9ffc..70de800 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/CompressionTest.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/CompressionTest.html
@@ -142,7 +142,7 @@
 <span class="sourceLineNo">134</span>    writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));<a name="line.134"></a>
 <span class="sourceLineNo">135</span>    writer.close();<a name="line.135"></a>
 <span class="sourceLineNo">136</span>    Cell cc = null;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf);<a name="line.137"></a>
+<span class="sourceLineNo">137</span>    HFile.Reader reader = HFile.createReader(fs, path, CacheConfig.DISABLED, true, conf);<a name="line.137"></a>
 <span class="sourceLineNo">138</span>    try {<a name="line.138"></a>
 <span class="sourceLineNo">139</span>      reader.loadFileInfo();<a name="line.139"></a>
 <span class="sourceLineNo">140</span>      HFileScanner scanner = reader.getScanner(false, true);<a name="line.140"></a>


[41/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/d702fb71
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/d702fb71
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/d702fb71

Branch: refs/heads/asf-site
Commit: d702fb719ce0bb7376a6d44317a617bc8e1a21d3
Parents: b31c1d9
Author: jenkins <bu...@apache.org>
Authored: Wed Dec 12 14:52:06 2018 +0000
Committer: jenkins <bu...@apache.org>
Committed: Wed Dec 12 14:52:06 2018 +0000

----------------------------------------------------------------------
 acid-semantics.html                             |    4 +-
 apache_hbase_reference_guide.pdf                |    4 +-
 .../hbase/mapreduce/HFileOutputFormat2.html     |    6 +-
 .../hbase/mapreduce/HFileOutputFormat2.html     | 1126 ++-
 book.html                                       |    2 +-
 bulk-loads.html                                 |    4 +-
 checkstyle-aggregate.html                       |  282 +-
 coc.html                                        |    4 +-
 dependencies.html                               |    4 +-
 dependency-convergence.html                     |    4 +-
 dependency-info.html                            |    4 +-
 dependency-management.html                      |    4 +-
 devapidocs/constant-values.html                 |    4 +-
 .../io/hfile/HFile.CachingBlockReader.html      |    6 +-
 .../hadoop/hbase/io/hfile/HFile.FileInfo.html   |   74 +-
 .../hadoop/hbase/io/hfile/HFile.Reader.html     |   64 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.html |   30 +-
 .../HFileOutputFormat2.WriterLength.html        |    8 +-
 .../hbase/mapreduce/HFileOutputFormat2.html     |   40 +-
 ...BaseFsck.CheckRegionConsistencyWorkItem.html |   10 +-
 .../HBaseFsck.ErrorReporter.ERROR_CODE.html     |   80 +-
 .../hbase/util/HBaseFsck.ErrorReporter.html     |   30 +-
 .../hbase/util/HBaseFsck.HBaseFsckTool.html     |    6 +-
 .../hadoop/hbase/util/HBaseFsck.HbckInfo.html   |   56 +-
 .../hadoop/hbase/util/HBaseFsck.HdfsEntry.html  |   14 +-
 .../hadoop/hbase/util/HBaseFsck.MetaEntry.html  |   18 +-
 .../hbase/util/HBaseFsck.OnlineEntry.html       |   10 +-
 .../util/HBaseFsck.PrintingErrorReporter.html   |   42 +-
 .../util/HBaseFsck.RegionRepairException.html   |    8 +-
 .../HBaseFsck.TableInfo.HDFSIntegrityFixer.html |   22 +-
 ...aseFsck.TableInfo.IntegrityFixSuggester.html |   20 +-
 .../hadoop/hbase/util/HBaseFsck.TableInfo.html  |   38 +-
 .../hbase/util/HBaseFsck.WorkItemHdfsDir.html   |   12 +-
 .../util/HBaseFsck.WorkItemHdfsRegionInfo.html  |   12 +-
 .../util/HBaseFsck.WorkItemOverlapMerge.html    |   10 +-
 .../hbase/util/HBaseFsck.WorkItemRegion.html    |   16 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.html |  250 +-
 .../org/apache/hadoop/hbase/Version.html        |    4 +-
 .../io/hfile/HFile.CachingBlockReader.html      | 1194 ++-
 .../hadoop/hbase/io/hfile/HFile.FileInfo.html   | 1194 ++-
 .../hadoop/hbase/io/hfile/HFile.Reader.html     | 1194 ++-
 .../hadoop/hbase/io/hfile/HFile.Writer.html     | 1194 ++-
 .../hbase/io/hfile/HFile.WriterFactory.html     | 1194 ++-
 .../org/apache/hadoop/hbase/io/hfile/HFile.html | 1194 ++-
 ...ilePrettyPrinter.KeyValueStatsCollector.html |    2 +-
 ...ilePrettyPrinter.SimpleReporter.Builder.html |    2 +-
 .../HFilePrettyPrinter.SimpleReporter.html      |    2 +-
 .../hbase/io/hfile/HFilePrettyPrinter.html      |    2 +-
 .../mapreduce/HFileOutputFormat2.TableInfo.html | 1126 ++-
 .../HFileOutputFormat2.WriterLength.html        | 1126 ++-
 .../hbase/mapreduce/HFileOutputFormat2.html     | 1126 ++-
 .../LoadIncrementalHFiles.BulkHFileVisitor.html |    6 +-
 .../LoadIncrementalHFiles.LoadQueueItem.html    |    6 +-
 .../hbase/tool/LoadIncrementalHFiles.html       |    6 +-
 .../hadoop/hbase/util/CompressionTest.html      |    2 +-
 ...BaseFsck.CheckRegionConsistencyWorkItem.html | 8575 +++++++++---------
 .../HBaseFsck.ErrorReporter.ERROR_CODE.html     | 8575 +++++++++---------
 .../hbase/util/HBaseFsck.ErrorReporter.html     | 8575 +++++++++---------
 .../hbase/util/HBaseFsck.FileLockCallable.html  | 8575 +++++++++---------
 .../hbase/util/HBaseFsck.HBaseFsckTool.html     | 8575 +++++++++---------
 .../hadoop/hbase/util/HBaseFsck.HbckInfo.html   | 8575 +++++++++---------
 .../hadoop/hbase/util/HBaseFsck.HdfsEntry.html  | 8575 +++++++++---------
 .../hadoop/hbase/util/HBaseFsck.MetaEntry.html  | 8575 +++++++++---------
 .../hbase/util/HBaseFsck.OnlineEntry.html       | 8575 +++++++++---------
 .../util/HBaseFsck.PrintingErrorReporter.html   | 8575 +++++++++---------
 .../HBaseFsck.RegionBoundariesInformation.html  | 8575 +++++++++---------
 .../util/HBaseFsck.RegionRepairException.html   | 8575 +++++++++---------
 .../HBaseFsck.TableInfo.HDFSIntegrityFixer.html | 8575 +++++++++---------
 ...aseFsck.TableInfo.IntegrityFixSuggester.html | 8575 +++++++++---------
 .../hadoop/hbase/util/HBaseFsck.TableInfo.html  | 8575 +++++++++---------
 .../hbase/util/HBaseFsck.WorkItemHdfsDir.html   | 8575 +++++++++---------
 .../util/HBaseFsck.WorkItemHdfsRegionInfo.html  | 8575 +++++++++---------
 .../util/HBaseFsck.WorkItemOverlapMerge.html    | 8575 +++++++++---------
 .../hbase/util/HBaseFsck.WorkItemRegion.html    | 8575 +++++++++---------
 .../org/apache/hadoop/hbase/util/HBaseFsck.html | 8575 +++++++++---------
 ...leCorruptionChecker.MobRegionDirChecker.html |    2 +-
 ...HFileCorruptionChecker.RegionDirChecker.html |    2 +-
 .../hbase/util/hbck/HFileCorruptionChecker.html |    2 +-
 downloads.html                                  |    4 +-
 export_control.html                             |    4 +-
 index.html                                      |    4 +-
 integration.html                                |    4 +-
 issue-tracking.html                             |    4 +-
 license.html                                    |    4 +-
 mail-lists.html                                 |    4 +-
 metrics.html                                    |    4 +-
 old_news.html                                   |    4 +-
 plugin-management.html                          |    4 +-
 plugins.html                                    |    4 +-
 poweredbyhbase.html                             |    4 +-
 project-info.html                               |    4 +-
 project-reports.html                            |    4 +-
 project-summary.html                            |    4 +-
 pseudo-distributed.html                         |    4 +-
 replication.html                                |    4 +-
 resources.html                                  |    4 +-
 source-repository.html                          |    4 +-
 sponsors.html                                   |    4 +-
 supportingprojects.html                         |    4 +-
 team-list.html                                  |    4 +-
 100 files changed, 92228 insertions(+), 92268 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/acid-semantics.html
----------------------------------------------------------------------
diff --git a/acid-semantics.html b/acid-semantics.html
index ad92d73..80e143c 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181211" />
+    <meta name="Date-Revision-yyyymmdd" content="20181212" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) ACID Properties
@@ -611,7 +611,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-12-11</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-12-12</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index cbe62a6..c7ef172 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20181211143338+00'00')
-/CreationDate (D:20181211144956+00'00')
+/ModDate (D:20181212143302+00'00')
+/CreationDate (D:20181212144847+00'00')
 >>
 endobj
 2 0 obj

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/apidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
----------------------------------------------------------------------
diff --git a/apidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html b/apidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 1173a66..972b2f8 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -409,7 +409,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configureIncrementalLoad</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.585">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.583">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;table,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;regionLocator)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -437,7 +437,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockList">
 <li class="blockList">
 <h4>configureIncrementalLoad</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.604">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.602">configureIncrementalLoad</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;tableDescriptor,
                                             <a href="../../../../../org/apache/hadoop/hbase/client/RegionLocator.html" title="interface in org.apache.hadoop.hbase.client">RegionLocator</a>&nbsp;regionLocator)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -465,7 +465,7 @@ extends org.apache.hadoop.mapreduce.lib.output.FileOutputFormat&lt;<a href="../.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>configureIncrementalLoadMap</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.690">configureIncrementalLoadMap</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#line.688">configureIncrementalLoadMap</a>(org.apache.hadoop.mapreduce.Job&nbsp;job,
                                                <a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;tableDescriptor)
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>


[09/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.TableInfo.HDFSIntegrityFixer.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="

<TRUNCATED>

[16/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.HbckInfo.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the conf.<a name="line.1543"></a>
-<span cla

<TRUNCATED>

[10/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.RegionRepairException.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unl

<TRUNCATED>

[18/41] hbase-site git commit: Published site at 67d6d5084cf8fc094cda4bd3f091d8a0a9cb1d3e.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d702fb71/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
index 62f81b6..f6f6104 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/HBaseFsck.FileLockCallable.html
@@ -930,7 +930,7 @@
 <span class="sourceLineNo">922</span>            // For all the stores in this column family.<a name="line.922"></a>
 <span class="sourceLineNo">923</span>            for (FileStatus storeFile : storeFiles) {<a name="line.923"></a>
 <span class="sourceLineNo">924</span>              HFile.Reader reader = HFile.createReader(fs, storeFile.getPath(),<a name="line.924"></a>
-<span class="sourceLineNo">925</span>                new CacheConfig(getConf()), true, getConf());<a name="line.925"></a>
+<span class="sourceLineNo">925</span>                CacheConfig.DISABLED, true, getConf());<a name="line.925"></a>
 <span class="sourceLineNo">926</span>              if ((reader.getFirstKey() != null)<a name="line.926"></a>
 <span class="sourceLineNo">927</span>                  &amp;&amp; ((storeFirstKey == null) || (comparator.compare(storeFirstKey,<a name="line.927"></a>
 <span class="sourceLineNo">928</span>                      ((KeyValue.KeyOnlyKeyValue) reader.getFirstKey().get()).getKey()) &gt; 0))) {<a name="line.928"></a>
@@ -1033,4295 +1033,4294 @@
 <span class="sourceLineNo">1025</span>        byte[] start, end;<a name="line.1025"></a>
 <span class="sourceLineNo">1026</span>        HFile.Reader hf = null;<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        try {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          CacheConfig cacheConf = new CacheConfig(getConf());<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          hf = HFile.createReader(fs, hfile.getPath(), cacheConf, true, getConf());<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          hf.loadFileInfo();<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        } catch (IOException ioe) {<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>          continue;<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>        } catch (NullPointerException ioe) {<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>          continue;<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>        } finally {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>          if (hf != null) {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>            hf.close();<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>          }<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>        }<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span><a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        // expand the range to include the range of all hfiles<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>        if (orphanRegionRange == null) {<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>          // first range<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span>        } else {<a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>          // TODO add test<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span><a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>          // expand range only if the hfile is wider.<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>            orphanRegionRange.setFirst(start);<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>          }<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span>            orphanRegionRange.setSecond(end);<a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>          }<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>        }<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>      }<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>    }<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>    if (orphanRegionRange == null) {<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>      fixes++;<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>      sidelineRegionDir(fs, hi);<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>      return;<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>    }<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span><a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>    // create new region on hdfs. move data into place.<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>        .build();<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span><a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    // rename all the data to new region<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    mergeRegionDirs(target, hi);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    fixes++;<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>  }<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span><a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>  /**<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>   * then reload to merge potentially overlapping regions.<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>   *<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>   * @return number of table integrity errors found<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>   */<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>    // Determine what's on HDFS<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span><a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>    int errs = errors.getErrorList().size();<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    // First time just get suggestions.<a name="line.1101"></a>
-<span class="sourceLineNo">1102</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1102"></a>
-<span class="sourceLineNo">1103</span>    checkHdfsIntegrity(false, false);<a name="line.1103"></a>
-<span class="sourceLineNo">1104</span><a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    if (errors.getErrorList().size() == errs) {<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>      return 0;<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>    }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>    }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>    // Make sure there are no holes now.<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>    if (shouldFixHdfsHoles()) {<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>      clearState(); // this also resets # fixes.<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>      loadHdfsRegionDirs();<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1119"></a>
-<span class="sourceLineNo">1120</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1120"></a>
-<span class="sourceLineNo">1121</span>    }<a name="line.1121"></a>
-<span class="sourceLineNo">1122</span><a name="line.1122"></a>
-<span class="sourceLineNo">1123</span>    // Now we fix overlaps<a name="line.1123"></a>
-<span class="sourceLineNo">1124</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1124"></a>
-<span class="sourceLineNo">1125</span>      // second pass we fix overlaps.<a name="line.1125"></a>
-<span class="sourceLineNo">1126</span>      clearState(); // this also resets # fixes.<a name="line.1126"></a>
-<span class="sourceLineNo">1127</span>      loadHdfsRegionDirs();<a name="line.1127"></a>
-<span class="sourceLineNo">1128</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1128"></a>
-<span class="sourceLineNo">1129</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1129"></a>
-<span class="sourceLineNo">1130</span>    }<a name="line.1130"></a>
-<span class="sourceLineNo">1131</span><a name="line.1131"></a>
-<span class="sourceLineNo">1132</span>    return errors.getErrorList().size();<a name="line.1132"></a>
-<span class="sourceLineNo">1133</span>  }<a name="line.1133"></a>
-<span class="sourceLineNo">1134</span><a name="line.1134"></a>
-<span class="sourceLineNo">1135</span>  /**<a name="line.1135"></a>
-<span class="sourceLineNo">1136</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1136"></a>
-<span class="sourceLineNo">1137</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1137"></a>
-<span class="sourceLineNo">1138</span>   * any lingering reference file will be sidelined if found.<a name="line.1138"></a>
-<span class="sourceLineNo">1139</span>   * &lt;p&gt;<a name="line.1139"></a>
-<span class="sourceLineNo">1140</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1140"></a>
-<span class="sourceLineNo">1141</span>   * be fixed before a cluster can start properly.<a name="line.1141"></a>
-<span class="sourceLineNo">1142</span>   */<a name="line.1142"></a>
-<span class="sourceLineNo">1143</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1143"></a>
-<span class="sourceLineNo">1144</span>    clearState();<a name="line.1144"></a>
-<span class="sourceLineNo">1145</span>    Configuration conf = getConf();<a name="line.1145"></a>
-<span class="sourceLineNo">1146</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1146"></a>
-<span class="sourceLineNo">1147</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1147"></a>
-<span class="sourceLineNo">1148</span>    LOG.info("Computing mapping of all store files");<a name="line.1148"></a>
-<span class="sourceLineNo">1149</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1149"></a>
-<span class="sourceLineNo">1150</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1150"></a>
-<span class="sourceLineNo">1151</span>    errors.print("");<a name="line.1151"></a>
-<span class="sourceLineNo">1152</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1152"></a>
-<span class="sourceLineNo">1153</span>    for (Path path: allFiles.values()) {<a name="line.1153"></a>
-<span class="sourceLineNo">1154</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1154"></a>
-<span class="sourceLineNo">1155</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1155"></a>
-<span class="sourceLineNo">1156</span><a name="line.1156"></a>
-<span class="sourceLineNo">1157</span>      // Found a lingering reference file<a name="line.1157"></a>
-<span class="sourceLineNo">1158</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1158"></a>
-<span class="sourceLineNo">1159</span>        "Found lingering reference file " + path);<a name="line.1159"></a>
-<span class="sourceLineNo">1160</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1160"></a>
-<span class="sourceLineNo">1161</span><a name="line.1161"></a>
-<span class="sourceLineNo">1162</span>      // Now, trying to fix it since requested<a name="line.1162"></a>
-<span class="sourceLineNo">1163</span>      boolean success = false;<a name="line.1163"></a>
-<span class="sourceLineNo">1164</span>      String pathStr = path.toString();<a name="line.1164"></a>
-<span class="sourceLineNo">1165</span><a name="line.1165"></a>
-<span class="sourceLineNo">1166</span>      // A reference file path should be like<a name="line.1166"></a>
-<span class="sourceLineNo">1167</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1167"></a>
-<span class="sourceLineNo">1168</span>      // Up 5 directories to get the root folder.<a name="line.1168"></a>
-<span class="sourceLineNo">1169</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1169"></a>
-<span class="sourceLineNo">1170</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1170"></a>
-<span class="sourceLineNo">1171</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1171"></a>
-<span class="sourceLineNo">1172</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1172"></a>
-<span class="sourceLineNo">1173</span>      }<a name="line.1173"></a>
-<span class="sourceLineNo">1174</span>      if (index &gt; 0) {<a name="line.1174"></a>
-<span class="sourceLineNo">1175</span>        Path rootDir = getSidelineDir();<a name="line.1175"></a>
-<span class="sourceLineNo">1176</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1176"></a>
-<span class="sourceLineNo">1177</span>        fs.mkdirs(dst.getParent());<a name="line.1177"></a>
-<span class="sourceLineNo">1178</span>        LOG.info("Trying to sideline reference file "<a name="line.1178"></a>
-<span class="sourceLineNo">1179</span>          + path + " to " + dst);<a name="line.1179"></a>
-<span class="sourceLineNo">1180</span>        setShouldRerun();<a name="line.1180"></a>
-<span class="sourceLineNo">1181</span><a name="line.1181"></a>
-<span class="sourceLineNo">1182</span>        success = fs.rename(path, dst);<a name="line.1182"></a>
-<span class="sourceLineNo">1183</span>        debugLsr(dst);<a name="line.1183"></a>
-<span class="sourceLineNo">1184</span><a name="line.1184"></a>
-<span class="sourceLineNo">1185</span>      }<a name="line.1185"></a>
-<span class="sourceLineNo">1186</span>      if (!success) {<a name="line.1186"></a>
-<span class="sourceLineNo">1187</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1187"></a>
-<span class="sourceLineNo">1188</span>      }<a name="line.1188"></a>
-<span class="sourceLineNo">1189</span>    }<a name="line.1189"></a>
-<span class="sourceLineNo">1190</span>  }<a name="line.1190"></a>
-<span class="sourceLineNo">1191</span><a name="line.1191"></a>
-<span class="sourceLineNo">1192</span>  /**<a name="line.1192"></a>
-<span class="sourceLineNo">1193</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1193"></a>
-<span class="sourceLineNo">1194</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1194"></a>
-<span class="sourceLineNo">1195</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1195"></a>
-<span class="sourceLineNo">1196</span>   */<a name="line.1196"></a>
-<span class="sourceLineNo">1197</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1197"></a>
-<span class="sourceLineNo">1198</span>    Configuration conf = getConf();<a name="line.1198"></a>
-<span class="sourceLineNo">1199</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1199"></a>
-<span class="sourceLineNo">1200</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1200"></a>
-<span class="sourceLineNo">1201</span>    LOG.info("Computing mapping of all link files");<a name="line.1201"></a>
-<span class="sourceLineNo">1202</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1202"></a>
-<span class="sourceLineNo">1203</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1203"></a>
-<span class="sourceLineNo">1204</span>    errors.print("");<a name="line.1204"></a>
-<span class="sourceLineNo">1205</span><a name="line.1205"></a>
-<span class="sourceLineNo">1206</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1206"></a>
-<span class="sourceLineNo">1207</span>    for (Path path : allFiles.values()) {<a name="line.1207"></a>
-<span class="sourceLineNo">1208</span>      // building HFileLink object to gather locations<a name="line.1208"></a>
-<span class="sourceLineNo">1209</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1209"></a>
-<span class="sourceLineNo">1210</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1210"></a>
-<span class="sourceLineNo">1211</span><a name="line.1211"></a>
-<span class="sourceLineNo">1212</span>      // Found a lingering HFileLink<a name="line.1212"></a>
-<span class="sourceLineNo">1213</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1213"></a>
-<span class="sourceLineNo">1214</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1214"></a>
-<span class="sourceLineNo">1215</span><a name="line.1215"></a>
-<span class="sourceLineNo">1216</span>      // Now, trying to fix it since requested<a name="line.1216"></a>
-<span class="sourceLineNo">1217</span>      setShouldRerun();<a name="line.1217"></a>
-<span class="sourceLineNo">1218</span><a name="line.1218"></a>
-<span class="sourceLineNo">1219</span>      // An HFileLink path should be like<a name="line.1219"></a>
-<span class="sourceLineNo">1220</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1220"></a>
-<span class="sourceLineNo">1221</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1221"></a>
-<span class="sourceLineNo">1222</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1222"></a>
-<span class="sourceLineNo">1223</span><a name="line.1223"></a>
-<span class="sourceLineNo">1224</span>      if (!success) {<a name="line.1224"></a>
-<span class="sourceLineNo">1225</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1225"></a>
-<span class="sourceLineNo">1226</span>      }<a name="line.1226"></a>
-<span class="sourceLineNo">1227</span><a name="line.1227"></a>
-<span class="sourceLineNo">1228</span>      // An HFileLink backreference path should be like<a name="line.1228"></a>
-<span class="sourceLineNo">1229</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1229"></a>
-<span class="sourceLineNo">1230</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1230"></a>
-<span class="sourceLineNo">1231</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1231"></a>
-<span class="sourceLineNo">1232</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1232"></a>
-<span class="sourceLineNo">1233</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1233"></a>
-<span class="sourceLineNo">1234</span>                  path.getParent().getName()),<a name="line.1234"></a>
-<span class="sourceLineNo">1235</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1235"></a>
-<span class="sourceLineNo">1236</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1236"></a>
-<span class="sourceLineNo">1237</span><a name="line.1237"></a>
-<span class="sourceLineNo">1238</span>      if (!success) {<a name="line.1238"></a>
-<span class="sourceLineNo">1239</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1239"></a>
-<span class="sourceLineNo">1240</span>      }<a name="line.1240"></a>
-<span class="sourceLineNo">1241</span>    }<a name="line.1241"></a>
-<span class="sourceLineNo">1242</span>  }<a name="line.1242"></a>
-<span class="sourceLineNo">1243</span><a name="line.1243"></a>
-<span class="sourceLineNo">1244</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1244"></a>
-<span class="sourceLineNo">1245</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1245"></a>
-<span class="sourceLineNo">1246</span>    if (uri.isAbsolute()) return false;<a name="line.1246"></a>
-<span class="sourceLineNo">1247</span>    String relativePath = uri.getPath();<a name="line.1247"></a>
-<span class="sourceLineNo">1248</span>    Path rootDir = getSidelineDir();<a name="line.1248"></a>
-<span class="sourceLineNo">1249</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1249"></a>
-<span class="sourceLineNo">1250</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1250"></a>
-<span class="sourceLineNo">1251</span>    if (!pathCreated) {<a name="line.1251"></a>
-<span class="sourceLineNo">1252</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1252"></a>
-<span class="sourceLineNo">1253</span>      return false;<a name="line.1253"></a>
-<span class="sourceLineNo">1254</span>    }<a name="line.1254"></a>
-<span class="sourceLineNo">1255</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1255"></a>
-<span class="sourceLineNo">1256</span>    return fs.rename(path, dst);<a name="line.1256"></a>
-<span class="sourceLineNo">1257</span>  }<a name="line.1257"></a>
-<span class="sourceLineNo">1258</span><a name="line.1258"></a>
-<span class="sourceLineNo">1259</span>  /**<a name="line.1259"></a>
-<span class="sourceLineNo">1260</span>   * TODO -- need to add tests for this.<a name="line.1260"></a>
-<span class="sourceLineNo">1261</span>   */<a name="line.1261"></a>
-<span class="sourceLineNo">1262</span>  private void reportEmptyMetaCells() {<a name="line.1262"></a>
-<span class="sourceLineNo">1263</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1263"></a>
-<span class="sourceLineNo">1264</span>      emptyRegionInfoQualifiers.size());<a name="line.1264"></a>
-<span class="sourceLineNo">1265</span>    if (details) {<a name="line.1265"></a>
-<span class="sourceLineNo">1266</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1266"></a>
-<span class="sourceLineNo">1267</span>        errors.print("  " + r);<a name="line.1267"></a>
-<span class="sourceLineNo">1268</span>      }<a name="line.1268"></a>
-<span class="sourceLineNo">1269</span>    }<a name="line.1269"></a>
-<span class="sourceLineNo">1270</span>  }<a name="line.1270"></a>
-<span class="sourceLineNo">1271</span><a name="line.1271"></a>
-<span class="sourceLineNo">1272</span>  /**<a name="line.1272"></a>
-<span class="sourceLineNo">1273</span>   * TODO -- need to add tests for this.<a name="line.1273"></a>
-<span class="sourceLineNo">1274</span>   */<a name="line.1274"></a>
-<span class="sourceLineNo">1275</span>  private void reportTablesInFlux() {<a name="line.1275"></a>
-<span class="sourceLineNo">1276</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1276"></a>
-<span class="sourceLineNo">1277</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1277"></a>
-<span class="sourceLineNo">1278</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1278"></a>
-<span class="sourceLineNo">1279</span>    if (details) {<a name="line.1279"></a>
-<span class="sourceLineNo">1280</span>      if (numSkipped.get() &gt; 0) {<a name="line.1280"></a>
-<span class="sourceLineNo">1281</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1281"></a>
-<span class="sourceLineNo">1282</span>      }<a name="line.1282"></a>
-<span class="sourceLineNo">1283</span>      for (TableDescriptor td : allTables) {<a name="line.1283"></a>
-<span class="sourceLineNo">1284</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1284"></a>
-<span class="sourceLineNo">1285</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1285"></a>
-<span class="sourceLineNo">1286</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1286"></a>
-<span class="sourceLineNo">1287</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1287"></a>
-<span class="sourceLineNo">1288</span>      }<a name="line.1288"></a>
-<span class="sourceLineNo">1289</span>    }<a name="line.1289"></a>
-<span class="sourceLineNo">1290</span>  }<a name="line.1290"></a>
-<span class="sourceLineNo">1291</span><a name="line.1291"></a>
-<span class="sourceLineNo">1292</span>  public ErrorReporter getErrors() {<a name="line.1292"></a>
-<span class="sourceLineNo">1293</span>    return errors;<a name="line.1293"></a>
-<span class="sourceLineNo">1294</span>  }<a name="line.1294"></a>
-<span class="sourceLineNo">1295</span><a name="line.1295"></a>
-<span class="sourceLineNo">1296</span>  /**<a name="line.1296"></a>
-<span class="sourceLineNo">1297</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1297"></a>
-<span class="sourceLineNo">1298</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1298"></a>
-<span class="sourceLineNo">1299</span>   */<a name="line.1299"></a>
-<span class="sourceLineNo">1300</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1300"></a>
-<span class="sourceLineNo">1301</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1301"></a>
-<span class="sourceLineNo">1302</span>    if (regionDir == null) {<a name="line.1302"></a>
-<span class="sourceLineNo">1303</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1303"></a>
-<span class="sourceLineNo">1304</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1304"></a>
-<span class="sourceLineNo">1305</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1305"></a>
-<span class="sourceLineNo">1306</span>      }<a name="line.1306"></a>
-<span class="sourceLineNo">1307</span>      return;<a name="line.1307"></a>
-<span class="sourceLineNo">1308</span>    }<a name="line.1308"></a>
-<span class="sourceLineNo">1309</span><a name="line.1309"></a>
-<span class="sourceLineNo">1310</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1310"></a>
-<span class="sourceLineNo">1311</span>      // already loaded data<a name="line.1311"></a>
-<span class="sourceLineNo">1312</span>      return;<a name="line.1312"></a>
-<span class="sourceLineNo">1313</span>    }<a name="line.1313"></a>
-<span class="sourceLineNo">1314</span><a name="line.1314"></a>
-<span class="sourceLineNo">1315</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1315"></a>
-<span class="sourceLineNo">1316</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1316"></a>
-<span class="sourceLineNo">1317</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1317"></a>
-<span class="sourceLineNo">1318</span>    hbi.hdfsEntry.hri = hri;<a name="line.1318"></a>
-<span class="sourceLineNo">1319</span>  }<a name="line.1319"></a>
-<span class="sourceLineNo">1320</span><a name="line.1320"></a>
-<span class="sourceLineNo">1321</span>  /**<a name="line.1321"></a>
-<span class="sourceLineNo">1322</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1322"></a>
-<span class="sourceLineNo">1323</span>   * unresolvable way.<a name="line.1323"></a>
-<span class="sourceLineNo">1324</span>   */<a name="line.1324"></a>
-<span class="sourceLineNo">1325</span>  public static class RegionRepairException extends IOException {<a name="line.1325"></a>
-<span class="sourceLineNo">1326</span>    private static final long serialVersionUID = 1L;<a name="line.1326"></a>
-<span class="sourceLineNo">1327</span>    final IOException ioe;<a name="line.1327"></a>
-<span class="sourceLineNo">1328</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1328"></a>
-<span class="sourceLineNo">1329</span>      super(s);<a name="line.1329"></a>
-<span class="sourceLineNo">1330</span>      this.ioe = ioe;<a name="line.1330"></a>
-<span class="sourceLineNo">1331</span>    }<a name="line.1331"></a>
-<span class="sourceLineNo">1332</span>  }<a name="line.1332"></a>
-<span class="sourceLineNo">1333</span><a name="line.1333"></a>
-<span class="sourceLineNo">1334</span>  /**<a name="line.1334"></a>
-<span class="sourceLineNo">1335</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1335"></a>
-<span class="sourceLineNo">1336</span>   */<a name="line.1336"></a>
-<span class="sourceLineNo">1337</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1337"></a>
-<span class="sourceLineNo">1338</span>      throws IOException, InterruptedException {<a name="line.1338"></a>
-<span class="sourceLineNo">1339</span>    tablesInfo.clear(); // regenerating the data<a name="line.1339"></a>
-<span class="sourceLineNo">1340</span>    // generate region split structure<a name="line.1340"></a>
-<span class="sourceLineNo">1341</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1341"></a>
-<span class="sourceLineNo">1342</span><a name="line.1342"></a>
-<span class="sourceLineNo">1343</span>    // Parallelized read of .regioninfo files.<a name="line.1343"></a>
-<span class="sourceLineNo">1344</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1344"></a>
-<span class="sourceLineNo">1345</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1345"></a>
-<span class="sourceLineNo">1346</span><a name="line.1346"></a>
-<span class="sourceLineNo">1347</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1347"></a>
-<span class="sourceLineNo">1348</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1348"></a>
-<span class="sourceLineNo">1349</span>      hbis.add(work);<a name="line.1349"></a>
-<span class="sourceLineNo">1350</span>    }<a name="line.1350"></a>
-<span class="sourceLineNo">1351</span><a name="line.1351"></a>
-<span class="sourceLineNo">1352</span>    // Submit and wait for completion<a name="line.1352"></a>
-<span class="sourceLineNo">1353</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1353"></a>
-<span class="sourceLineNo">1354</span><a name="line.1354"></a>
-<span class="sourceLineNo">1355</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1355"></a>
-<span class="sourceLineNo">1356</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1356"></a>
-<span class="sourceLineNo">1357</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1357"></a>
-<span class="sourceLineNo">1358</span>      try {<a name="line.1358"></a>
-<span class="sourceLineNo">1359</span>        f.get();<a name="line.1359"></a>
-<span class="sourceLineNo">1360</span>      } catch(ExecutionException e) {<a name="line.1360"></a>
-<span class="sourceLineNo">1361</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1361"></a>
-<span class="sourceLineNo">1362</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1362"></a>
-<span class="sourceLineNo">1363</span>      }<a name="line.1363"></a>
-<span class="sourceLineNo">1364</span>    }<a name="line.1364"></a>
-<span class="sourceLineNo">1365</span><a name="line.1365"></a>
-<span class="sourceLineNo">1366</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1366"></a>
-<span class="sourceLineNo">1367</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1367"></a>
-<span class="sourceLineNo">1368</span>    // serialized table info gathering.<a name="line.1368"></a>
-<span class="sourceLineNo">1369</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1369"></a>
-<span class="sourceLineNo">1370</span><a name="line.1370"></a>
-<span class="sourceLineNo">1371</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1371"></a>
-<span class="sourceLineNo">1372</span>        // was an orphan<a name="line.1372"></a>
-<span class="sourceLineNo">1373</span>        continue;<a name="line.1373"></a>
-<span class="sourceLineNo">1374</span>      }<a name="line.1374"></a>
+<span class="sourceLineNo">1028</span>          hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf());<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>          hf.loadFileInfo();<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span>          Optional&lt;Cell&gt; startKv = hf.getFirstKey();<a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>          start = CellUtil.cloneRow(startKv.get());<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>          Optional&lt;Cell&gt; endKv = hf.getLastKey();<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>          end = CellUtil.cloneRow(endKv.get());<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>        } catch (IOException ioe) {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>          LOG.warn("Problem reading orphan file " + hfile + ", skipping");<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>          continue;<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>        } catch (NullPointerException ioe) {<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>          LOG.warn("Orphan file " + hfile + " is possibly corrupted HFile, skipping");<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span>          continue;<a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>        } finally {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>          if (hf != null) {<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>            hf.close();<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>          }<a name="line.1043"></a>
+<span class="sourceLineNo">1044</span>        }<a name="line.1044"></a>
+<span class="sourceLineNo">1045</span><a name="line.1045"></a>
+<span class="sourceLineNo">1046</span>        // expand the range to include the range of all hfiles<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>        if (orphanRegionRange == null) {<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>          // first range<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span>          orphanRegionRange = new Pair&lt;&gt;(start, end);<a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>        } else {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>          // TODO add test<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span><a name="line.1052"></a>
+<span class="sourceLineNo">1053</span>          // expand range only if the hfile is wider.<a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>          if (Bytes.compareTo(orphanRegionRange.getFirst(), start) &gt; 0) {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>            orphanRegionRange.setFirst(start);<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>          }<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>          if (Bytes.compareTo(orphanRegionRange.getSecond(), end) &lt; 0 ) {<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>            orphanRegionRange.setSecond(end);<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>          }<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        }<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>      }<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>    }<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>    if (orphanRegionRange == null) {<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>      LOG.warn("No data in dir " + p + ", sidelining data");<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>      fixes++;<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>      sidelineRegionDir(fs, hi);<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>      return;<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>    }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span>    LOG.info("Min max keys are : [" + Bytes.toString(orphanRegionRange.getFirst()) + ", " +<a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>        Bytes.toString(orphanRegionRange.getSecond()) + ")");<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span><a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    // create new region on hdfs. move data into place.<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(template.getTableName())<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>        .setStartKey(orphanRegionRange.getFirst())<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>        .setEndKey(Bytes.add(orphanRegionRange.getSecond(), new byte[1]))<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>        .build();<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    LOG.info("Creating new region : " + regionInfo);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    HRegion region = HBaseFsckRepair.createHDFSRegionDir(getConf(), regionInfo, template);<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>    Path target = region.getRegionFileSystem().getRegionDir();<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span><a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>    // rename all the data to new region<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>    mergeRegionDirs(target, hi);<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>    fixes++;<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>  }<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span><a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>  /**<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>   * This method determines if there are table integrity errors in HDFS.  If<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>   * there are errors and the appropriate "fix" options are enabled, the method<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>   * will first correct orphan regions making them into legit regiondirs, and<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>   * then reload to merge potentially overlapping regions.<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>   *<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>   * @return number of table integrity errors found<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>   */<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>  private int restoreHdfsIntegrity() throws IOException, InterruptedException {<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    // Determine what's on HDFS<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>    LOG.info("Loading HBase regioninfo from HDFS...");<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span>    loadHdfsRegionDirs(); // populating regioninfo table.<a name="line.1097"></a>
+<span class="sourceLineNo">1098</span><a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    int errs = errors.getErrorList().size();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    // First time just get suggestions.<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1101"></a>
+<span class="sourceLineNo">1102</span>    checkHdfsIntegrity(false, false);<a name="line.1102"></a>
+<span class="sourceLineNo">1103</span><a name="line.1103"></a>
+<span class="sourceLineNo">1104</span>    if (errors.getErrorList().size() == errs) {<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>      LOG.info("No integrity errors.  We are done with this phase. Glorious.");<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>      return 0;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>    }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>    if (shouldFixHdfsOrphans() &amp;&amp; orphanHdfsDirs.size() &gt; 0) {<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>      adoptHdfsOrphans(orphanHdfsDirs);<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>      // TODO optimize by incrementally adding instead of reloading.<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>    }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span><a name="line.1113"></a>
+<span class="sourceLineNo">1114</span>    // Make sure there are no holes now.<a name="line.1114"></a>
+<span class="sourceLineNo">1115</span>    if (shouldFixHdfsHoles()) {<a name="line.1115"></a>
+<span class="sourceLineNo">1116</span>      clearState(); // this also resets # fixes.<a name="line.1116"></a>
+<span class="sourceLineNo">1117</span>      loadHdfsRegionDirs();<a name="line.1117"></a>
+<span class="sourceLineNo">1118</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1118"></a>
+<span class="sourceLineNo">1119</span>      tablesInfo = checkHdfsIntegrity(shouldFixHdfsHoles(), false);<a name="line.1119"></a>
+<span class="sourceLineNo">1120</span>    }<a name="line.1120"></a>
+<span class="sourceLineNo">1121</span><a name="line.1121"></a>
+<span class="sourceLineNo">1122</span>    // Now we fix overlaps<a name="line.1122"></a>
+<span class="sourceLineNo">1123</span>    if (shouldFixHdfsOverlaps()) {<a name="line.1123"></a>
+<span class="sourceLineNo">1124</span>      // second pass we fix overlaps.<a name="line.1124"></a>
+<span class="sourceLineNo">1125</span>      clearState(); // this also resets # fixes.<a name="line.1125"></a>
+<span class="sourceLineNo">1126</span>      loadHdfsRegionDirs();<a name="line.1126"></a>
+<span class="sourceLineNo">1127</span>      tablesInfo = loadHdfsRegionInfos(); // update tableInfos based on region info in fs.<a name="line.1127"></a>
+<span class="sourceLineNo">1128</span>      tablesInfo = checkHdfsIntegrity(false, shouldFixHdfsOverlaps());<a name="line.1128"></a>
+<span class="sourceLineNo">1129</span>    }<a name="line.1129"></a>
+<span class="sourceLineNo">1130</span><a name="line.1130"></a>
+<span class="sourceLineNo">1131</span>    return errors.getErrorList().size();<a name="line.1131"></a>
+<span class="sourceLineNo">1132</span>  }<a name="line.1132"></a>
+<span class="sourceLineNo">1133</span><a name="line.1133"></a>
+<span class="sourceLineNo">1134</span>  /**<a name="line.1134"></a>
+<span class="sourceLineNo">1135</span>   * Scan all the store file names to find any lingering reference files,<a name="line.1135"></a>
+<span class="sourceLineNo">1136</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1136"></a>
+<span class="sourceLineNo">1137</span>   * any lingering reference file will be sidelined if found.<a name="line.1137"></a>
+<span class="sourceLineNo">1138</span>   * &lt;p&gt;<a name="line.1138"></a>
+<span class="sourceLineNo">1139</span>   * Lingering reference file prevents a region from opening. It has to<a name="line.1139"></a>
+<span class="sourceLineNo">1140</span>   * be fixed before a cluster can start properly.<a name="line.1140"></a>
+<span class="sourceLineNo">1141</span>   */<a name="line.1141"></a>
+<span class="sourceLineNo">1142</span>  private void offlineReferenceFileRepair() throws IOException, InterruptedException {<a name="line.1142"></a>
+<span class="sourceLineNo">1143</span>    clearState();<a name="line.1143"></a>
+<span class="sourceLineNo">1144</span>    Configuration conf = getConf();<a name="line.1144"></a>
+<span class="sourceLineNo">1145</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1145"></a>
+<span class="sourceLineNo">1146</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1146"></a>
+<span class="sourceLineNo">1147</span>    LOG.info("Computing mapping of all store files");<a name="line.1147"></a>
+<span class="sourceLineNo">1148</span>    Map&lt;String, Path&gt; allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot,<a name="line.1148"></a>
+<span class="sourceLineNo">1149</span>      new FSUtils.ReferenceFileFilter(fs), executor, errors);<a name="line.1149"></a>
+<span class="sourceLineNo">1150</span>    errors.print("");<a name="line.1150"></a>
+<span class="sourceLineNo">1151</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1151"></a>
+<span class="sourceLineNo">1152</span>    for (Path path: allFiles.values()) {<a name="line.1152"></a>
+<span class="sourceLineNo">1153</span>      Path referredToFile = StoreFileInfo.getReferredToFile(path);<a name="line.1153"></a>
+<span class="sourceLineNo">1154</span>      if (fs.exists(referredToFile)) continue;  // good, expected<a name="line.1154"></a>
+<span class="sourceLineNo">1155</span><a name="line.1155"></a>
+<span class="sourceLineNo">1156</span>      // Found a lingering reference file<a name="line.1156"></a>
+<span class="sourceLineNo">1157</span>      errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,<a name="line.1157"></a>
+<span class="sourceLineNo">1158</span>        "Found lingering reference file " + path);<a name="line.1158"></a>
+<span class="sourceLineNo">1159</span>      if (!shouldFixReferenceFiles()) continue;<a name="line.1159"></a>
+<span class="sourceLineNo">1160</span><a name="line.1160"></a>
+<span class="sourceLineNo">1161</span>      // Now, trying to fix it since requested<a name="line.1161"></a>
+<span class="sourceLineNo">1162</span>      boolean success = false;<a name="line.1162"></a>
+<span class="sourceLineNo">1163</span>      String pathStr = path.toString();<a name="line.1163"></a>
+<span class="sourceLineNo">1164</span><a name="line.1164"></a>
+<span class="sourceLineNo">1165</span>      // A reference file path should be like<a name="line.1165"></a>
+<span class="sourceLineNo">1166</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/referred_file.region_name<a name="line.1166"></a>
+<span class="sourceLineNo">1167</span>      // Up 5 directories to get the root folder.<a name="line.1167"></a>
+<span class="sourceLineNo">1168</span>      // So the file will be sidelined to a similar folder structure.<a name="line.1168"></a>
+<span class="sourceLineNo">1169</span>      int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);<a name="line.1169"></a>
+<span class="sourceLineNo">1170</span>      for (int i = 0; index &gt; 0 &amp;&amp; i &lt; 5; i++) {<a name="line.1170"></a>
+<span class="sourceLineNo">1171</span>        index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index - 1);<a name="line.1171"></a>
+<span class="sourceLineNo">1172</span>      }<a name="line.1172"></a>
+<span class="sourceLineNo">1173</span>      if (index &gt; 0) {<a name="line.1173"></a>
+<span class="sourceLineNo">1174</span>        Path rootDir = getSidelineDir();<a name="line.1174"></a>
+<span class="sourceLineNo">1175</span>        Path dst = new Path(rootDir, pathStr.substring(index + 1));<a name="line.1175"></a>
+<span class="sourceLineNo">1176</span>        fs.mkdirs(dst.getParent());<a name="line.1176"></a>
+<span class="sourceLineNo">1177</span>        LOG.info("Trying to sideline reference file "<a name="line.1177"></a>
+<span class="sourceLineNo">1178</span>          + path + " to " + dst);<a name="line.1178"></a>
+<span class="sourceLineNo">1179</span>        setShouldRerun();<a name="line.1179"></a>
+<span class="sourceLineNo">1180</span><a name="line.1180"></a>
+<span class="sourceLineNo">1181</span>        success = fs.rename(path, dst);<a name="line.1181"></a>
+<span class="sourceLineNo">1182</span>        debugLsr(dst);<a name="line.1182"></a>
+<span class="sourceLineNo">1183</span><a name="line.1183"></a>
+<span class="sourceLineNo">1184</span>      }<a name="line.1184"></a>
+<span class="sourceLineNo">1185</span>      if (!success) {<a name="line.1185"></a>
+<span class="sourceLineNo">1186</span>        LOG.error("Failed to sideline reference file " + path);<a name="line.1186"></a>
+<span class="sourceLineNo">1187</span>      }<a name="line.1187"></a>
+<span class="sourceLineNo">1188</span>    }<a name="line.1188"></a>
+<span class="sourceLineNo">1189</span>  }<a name="line.1189"></a>
+<span class="sourceLineNo">1190</span><a name="line.1190"></a>
+<span class="sourceLineNo">1191</span>  /**<a name="line.1191"></a>
+<span class="sourceLineNo">1192</span>   * Scan all the store file names to find any lingering HFileLink files,<a name="line.1192"></a>
+<span class="sourceLineNo">1193</span>   * which refer to some none-exiting files. If "fix" option is enabled,<a name="line.1193"></a>
+<span class="sourceLineNo">1194</span>   * any lingering HFileLink file will be sidelined if found.<a name="line.1194"></a>
+<span class="sourceLineNo">1195</span>   */<a name="line.1195"></a>
+<span class="sourceLineNo">1196</span>  private void offlineHLinkFileRepair() throws IOException, InterruptedException {<a name="line.1196"></a>
+<span class="sourceLineNo">1197</span>    Configuration conf = getConf();<a name="line.1197"></a>
+<span class="sourceLineNo">1198</span>    Path hbaseRoot = FSUtils.getRootDir(conf);<a name="line.1198"></a>
+<span class="sourceLineNo">1199</span>    FileSystem fs = hbaseRoot.getFileSystem(conf);<a name="line.1199"></a>
+<span class="sourceLineNo">1200</span>    LOG.info("Computing mapping of all link files");<a name="line.1200"></a>
+<span class="sourceLineNo">1201</span>    Map&lt;String, Path&gt; allFiles = FSUtils<a name="line.1201"></a>
+<span class="sourceLineNo">1202</span>        .getTableStoreFilePathMap(fs, hbaseRoot, new FSUtils.HFileLinkFilter(), executor, errors);<a name="line.1202"></a>
+<span class="sourceLineNo">1203</span>    errors.print("");<a name="line.1203"></a>
+<span class="sourceLineNo">1204</span><a name="line.1204"></a>
+<span class="sourceLineNo">1205</span>    LOG.info("Validating mapping using HDFS state");<a name="line.1205"></a>
+<span class="sourceLineNo">1206</span>    for (Path path : allFiles.values()) {<a name="line.1206"></a>
+<span class="sourceLineNo">1207</span>      // building HFileLink object to gather locations<a name="line.1207"></a>
+<span class="sourceLineNo">1208</span>      HFileLink actualLink = HFileLink.buildFromHFileLinkPattern(conf, path);<a name="line.1208"></a>
+<span class="sourceLineNo">1209</span>      if (actualLink.exists(fs)) continue; // good, expected<a name="line.1209"></a>
+<span class="sourceLineNo">1210</span><a name="line.1210"></a>
+<span class="sourceLineNo">1211</span>      // Found a lingering HFileLink<a name="line.1211"></a>
+<span class="sourceLineNo">1212</span>      errors.reportError(ERROR_CODE.LINGERING_HFILELINK, "Found lingering HFileLink " + path);<a name="line.1212"></a>
+<span class="sourceLineNo">1213</span>      if (!shouldFixHFileLinks()) continue;<a name="line.1213"></a>
+<span class="sourceLineNo">1214</span><a name="line.1214"></a>
+<span class="sourceLineNo">1215</span>      // Now, trying to fix it since requested<a name="line.1215"></a>
+<span class="sourceLineNo">1216</span>      setShouldRerun();<a name="line.1216"></a>
+<span class="sourceLineNo">1217</span><a name="line.1217"></a>
+<span class="sourceLineNo">1218</span>      // An HFileLink path should be like<a name="line.1218"></a>
+<span class="sourceLineNo">1219</span>      // ${hbase.rootdir}/data/namespace/table_name/region_id/family_name/linkedtable=linkedregionname-linkedhfilename<a name="line.1219"></a>
+<span class="sourceLineNo">1220</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1220"></a>
+<span class="sourceLineNo">1221</span>      boolean success = sidelineFile(fs, hbaseRoot, path);<a name="line.1221"></a>
+<span class="sourceLineNo">1222</span><a name="line.1222"></a>
+<span class="sourceLineNo">1223</span>      if (!success) {<a name="line.1223"></a>
+<span class="sourceLineNo">1224</span>        LOG.error("Failed to sideline HFileLink file " + path);<a name="line.1224"></a>
+<span class="sourceLineNo">1225</span>      }<a name="line.1225"></a>
+<span class="sourceLineNo">1226</span><a name="line.1226"></a>
+<span class="sourceLineNo">1227</span>      // An HFileLink backreference path should be like<a name="line.1227"></a>
+<span class="sourceLineNo">1228</span>      // ${hbase.rootdir}/archive/data/namespace/table_name/region_id/family_name/.links-linkedhfilename<a name="line.1228"></a>
+<span class="sourceLineNo">1229</span>      // sidelineing will happen in the ${hbase.rootdir}/${sidelinedir} directory with the same folder structure.<a name="line.1229"></a>
+<span class="sourceLineNo">1230</span>      Path backRefPath = FileLink.getBackReferencesDir(HFileArchiveUtil<a name="line.1230"></a>
+<span class="sourceLineNo">1231</span>              .getStoreArchivePath(conf, HFileLink.getReferencedTableName(path.getName().toString()),<a name="line.1231"></a>
+<span class="sourceLineNo">1232</span>                  HFileLink.getReferencedRegionName(path.getName().toString()),<a name="line.1232"></a>
+<span class="sourceLineNo">1233</span>                  path.getParent().getName()),<a name="line.1233"></a>
+<span class="sourceLineNo">1234</span>          HFileLink.getReferencedHFileName(path.getName().toString()));<a name="line.1234"></a>
+<span class="sourceLineNo">1235</span>      success = sidelineFile(fs, hbaseRoot, backRefPath);<a name="line.1235"></a>
+<span class="sourceLineNo">1236</span><a name="line.1236"></a>
+<span class="sourceLineNo">1237</span>      if (!success) {<a name="line.1237"></a>
+<span class="sourceLineNo">1238</span>        LOG.error("Failed to sideline HFileLink backreference file " + path);<a name="line.1238"></a>
+<span class="sourceLineNo">1239</span>      }<a name="line.1239"></a>
+<span class="sourceLineNo">1240</span>    }<a name="line.1240"></a>
+<span class="sourceLineNo">1241</span>  }<a name="line.1241"></a>
+<span class="sourceLineNo">1242</span><a name="line.1242"></a>
+<span class="sourceLineNo">1243</span>  private boolean sidelineFile(FileSystem fs, Path hbaseRoot, Path path) throws IOException {<a name="line.1243"></a>
+<span class="sourceLineNo">1244</span>    URI uri = hbaseRoot.toUri().relativize(path.toUri());<a name="line.1244"></a>
+<span class="sourceLineNo">1245</span>    if (uri.isAbsolute()) return false;<a name="line.1245"></a>
+<span class="sourceLineNo">1246</span>    String relativePath = uri.getPath();<a name="line.1246"></a>
+<span class="sourceLineNo">1247</span>    Path rootDir = getSidelineDir();<a name="line.1247"></a>
+<span class="sourceLineNo">1248</span>    Path dst = new Path(rootDir, relativePath);<a name="line.1248"></a>
+<span class="sourceLineNo">1249</span>    boolean pathCreated = fs.mkdirs(dst.getParent());<a name="line.1249"></a>
+<span class="sourceLineNo">1250</span>    if (!pathCreated) {<a name="line.1250"></a>
+<span class="sourceLineNo">1251</span>      LOG.error("Failed to create path: " + dst.getParent());<a name="line.1251"></a>
+<span class="sourceLineNo">1252</span>      return false;<a name="line.1252"></a>
+<span class="sourceLineNo">1253</span>    }<a name="line.1253"></a>
+<span class="sourceLineNo">1254</span>    LOG.info("Trying to sideline file " + path + " to " + dst);<a name="line.1254"></a>
+<span class="sourceLineNo">1255</span>    return fs.rename(path, dst);<a name="line.1255"></a>
+<span class="sourceLineNo">1256</span>  }<a name="line.1256"></a>
+<span class="sourceLineNo">1257</span><a name="line.1257"></a>
+<span class="sourceLineNo">1258</span>  /**<a name="line.1258"></a>
+<span class="sourceLineNo">1259</span>   * TODO -- need to add tests for this.<a name="line.1259"></a>
+<span class="sourceLineNo">1260</span>   */<a name="line.1260"></a>
+<span class="sourceLineNo">1261</span>  private void reportEmptyMetaCells() {<a name="line.1261"></a>
+<span class="sourceLineNo">1262</span>    errors.print("Number of empty REGIONINFO_QUALIFIER rows in hbase:meta: " +<a name="line.1262"></a>
+<span class="sourceLineNo">1263</span>      emptyRegionInfoQualifiers.size());<a name="line.1263"></a>
+<span class="sourceLineNo">1264</span>    if (details) {<a name="line.1264"></a>
+<span class="sourceLineNo">1265</span>      for (Result r: emptyRegionInfoQualifiers) {<a name="line.1265"></a>
+<span class="sourceLineNo">1266</span>        errors.print("  " + r);<a name="line.1266"></a>
+<span class="sourceLineNo">1267</span>      }<a name="line.1267"></a>
+<span class="sourceLineNo">1268</span>    }<a name="line.1268"></a>
+<span class="sourceLineNo">1269</span>  }<a name="line.1269"></a>
+<span class="sourceLineNo">1270</span><a name="line.1270"></a>
+<span class="sourceLineNo">1271</span>  /**<a name="line.1271"></a>
+<span class="sourceLineNo">1272</span>   * TODO -- need to add tests for this.<a name="line.1272"></a>
+<span class="sourceLineNo">1273</span>   */<a name="line.1273"></a>
+<span class="sourceLineNo">1274</span>  private void reportTablesInFlux() {<a name="line.1274"></a>
+<span class="sourceLineNo">1275</span>    AtomicInteger numSkipped = new AtomicInteger(0);<a name="line.1275"></a>
+<span class="sourceLineNo">1276</span>    TableDescriptor[] allTables = getTables(numSkipped);<a name="line.1276"></a>
+<span class="sourceLineNo">1277</span>    errors.print("Number of Tables: " + allTables.length);<a name="line.1277"></a>
+<span class="sourceLineNo">1278</span>    if (details) {<a name="line.1278"></a>
+<span class="sourceLineNo">1279</span>      if (numSkipped.get() &gt; 0) {<a name="line.1279"></a>
+<span class="sourceLineNo">1280</span>        errors.detail("Number of Tables in flux: " + numSkipped.get());<a name="line.1280"></a>
+<span class="sourceLineNo">1281</span>      }<a name="line.1281"></a>
+<span class="sourceLineNo">1282</span>      for (TableDescriptor td : allTables) {<a name="line.1282"></a>
+<span class="sourceLineNo">1283</span>        errors.detail("  Table: " + td.getTableName() + "\t" +<a name="line.1283"></a>
+<span class="sourceLineNo">1284</span>                           (td.isReadOnly() ? "ro" : "rw") + "\t" +<a name="line.1284"></a>
+<span class="sourceLineNo">1285</span>                            (td.isMetaRegion() ? "META" : "    ") + "\t" +<a name="line.1285"></a>
+<span class="sourceLineNo">1286</span>                           " families: " + td.getColumnFamilyCount());<a name="line.1286"></a>
+<span class="sourceLineNo">1287</span>      }<a name="line.1287"></a>
+<span class="sourceLineNo">1288</span>    }<a name="line.1288"></a>
+<span class="sourceLineNo">1289</span>  }<a name="line.1289"></a>
+<span class="sourceLineNo">1290</span><a name="line.1290"></a>
+<span class="sourceLineNo">1291</span>  public ErrorReporter getErrors() {<a name="line.1291"></a>
+<span class="sourceLineNo">1292</span>    return errors;<a name="line.1292"></a>
+<span class="sourceLineNo">1293</span>  }<a name="line.1293"></a>
+<span class="sourceLineNo">1294</span><a name="line.1294"></a>
+<span class="sourceLineNo">1295</span>  /**<a name="line.1295"></a>
+<span class="sourceLineNo">1296</span>   * Read the .regioninfo file from the file system.  If there is no<a name="line.1296"></a>
+<span class="sourceLineNo">1297</span>   * .regioninfo, add it to the orphan hdfs region list.<a name="line.1297"></a>
+<span class="sourceLineNo">1298</span>   */<a name="line.1298"></a>
+<span class="sourceLineNo">1299</span>  private void loadHdfsRegioninfo(HbckInfo hbi) throws IOException {<a name="line.1299"></a>
+<span class="sourceLineNo">1300</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1300"></a>
+<span class="sourceLineNo">1301</span>    if (regionDir == null) {<a name="line.1301"></a>
+<span class="sourceLineNo">1302</span>      if (hbi.getReplicaId() == RegionInfo.DEFAULT_REPLICA_ID) {<a name="line.1302"></a>
+<span class="sourceLineNo">1303</span>        // Log warning only for default/ primary replica with no region dir<a name="line.1303"></a>
+<span class="sourceLineNo">1304</span>        LOG.warn("No HDFS region dir found: " + hbi + " meta=" + hbi.metaEntry);<a name="line.1304"></a>
+<span class="sourceLineNo">1305</span>      }<a name="line.1305"></a>
+<span class="sourceLineNo">1306</span>      return;<a name="line.1306"></a>
+<span class="sourceLineNo">1307</span>    }<a name="line.1307"></a>
+<span class="sourceLineNo">1308</span><a name="line.1308"></a>
+<span class="sourceLineNo">1309</span>    if (hbi.hdfsEntry.hri != null) {<a name="line.1309"></a>
+<span class="sourceLineNo">1310</span>      // already loaded data<a name="line.1310"></a>
+<span class="sourceLineNo">1311</span>      return;<a name="line.1311"></a>
+<span class="sourceLineNo">1312</span>    }<a name="line.1312"></a>
+<span class="sourceLineNo">1313</span><a name="line.1313"></a>
+<span class="sourceLineNo">1314</span>    FileSystem fs = FileSystem.get(getConf());<a name="line.1314"></a>
+<span class="sourceLineNo">1315</span>    RegionInfo hri = HRegionFileSystem.loadRegionInfoFileContent(fs, regionDir);<a name="line.1315"></a>
+<span class="sourceLineNo">1316</span>    LOG.debug("RegionInfo read: " + hri.toString());<a name="line.1316"></a>
+<span class="sourceLineNo">1317</span>    hbi.hdfsEntry.hri = hri;<a name="line.1317"></a>
+<span class="sourceLineNo">1318</span>  }<a name="line.1318"></a>
+<span class="sourceLineNo">1319</span><a name="line.1319"></a>
+<span class="sourceLineNo">1320</span>  /**<a name="line.1320"></a>
+<span class="sourceLineNo">1321</span>   * Exception thrown when a integrity repair operation fails in an<a name="line.1321"></a>
+<span class="sourceLineNo">1322</span>   * unresolvable way.<a name="line.1322"></a>
+<span class="sourceLineNo">1323</span>   */<a name="line.1323"></a>
+<span class="sourceLineNo">1324</span>  public static class RegionRepairException extends IOException {<a name="line.1324"></a>
+<span class="sourceLineNo">1325</span>    private static final long serialVersionUID = 1L;<a name="line.1325"></a>
+<span class="sourceLineNo">1326</span>    final IOException ioe;<a name="line.1326"></a>
+<span class="sourceLineNo">1327</span>    public RegionRepairException(String s, IOException ioe) {<a name="line.1327"></a>
+<span class="sourceLineNo">1328</span>      super(s);<a name="line.1328"></a>
+<span class="sourceLineNo">1329</span>      this.ioe = ioe;<a name="line.1329"></a>
+<span class="sourceLineNo">1330</span>    }<a name="line.1330"></a>
+<span class="sourceLineNo">1331</span>  }<a name="line.1331"></a>
+<span class="sourceLineNo">1332</span><a name="line.1332"></a>
+<span class="sourceLineNo">1333</span>  /**<a name="line.1333"></a>
+<span class="sourceLineNo">1334</span>   * Populate hbi's from regionInfos loaded from file system.<a name="line.1334"></a>
+<span class="sourceLineNo">1335</span>   */<a name="line.1335"></a>
+<span class="sourceLineNo">1336</span>  private SortedMap&lt;TableName, TableInfo&gt; loadHdfsRegionInfos()<a name="line.1336"></a>
+<span class="sourceLineNo">1337</span>      throws IOException, InterruptedException {<a name="line.1337"></a>
+<span class="sourceLineNo">1338</span>    tablesInfo.clear(); // regenerating the data<a name="line.1338"></a>
+<span class="sourceLineNo">1339</span>    // generate region split structure<a name="line.1339"></a>
+<span class="sourceLineNo">1340</span>    Collection&lt;HbckInfo&gt; hbckInfos = regionInfoMap.values();<a name="line.1340"></a>
+<span class="sourceLineNo">1341</span><a name="line.1341"></a>
+<span class="sourceLineNo">1342</span>    // Parallelized read of .regioninfo files.<a name="line.1342"></a>
+<span class="sourceLineNo">1343</span>    List&lt;WorkItemHdfsRegionInfo&gt; hbis = new ArrayList&lt;&gt;(hbckInfos.size());<a name="line.1343"></a>
+<span class="sourceLineNo">1344</span>    List&lt;Future&lt;Void&gt;&gt; hbiFutures;<a name="line.1344"></a>
+<span class="sourceLineNo">1345</span><a name="line.1345"></a>
+<span class="sourceLineNo">1346</span>    for (HbckInfo hbi : hbckInfos) {<a name="line.1346"></a>
+<span class="sourceLineNo">1347</span>      WorkItemHdfsRegionInfo work = new WorkItemHdfsRegionInfo(hbi, this, errors);<a name="line.1347"></a>
+<span class="sourceLineNo">1348</span>      hbis.add(work);<a name="line.1348"></a>
+<span class="sourceLineNo">1349</span>    }<a name="line.1349"></a>
+<span class="sourceLineNo">1350</span><a name="line.1350"></a>
+<span class="sourceLineNo">1351</span>    // Submit and wait for completion<a name="line.1351"></a>
+<span class="sourceLineNo">1352</span>    hbiFutures = executor.invokeAll(hbis);<a name="line.1352"></a>
+<span class="sourceLineNo">1353</span><a name="line.1353"></a>
+<span class="sourceLineNo">1354</span>    for(int i=0; i&lt;hbiFutures.size(); i++) {<a name="line.1354"></a>
+<span class="sourceLineNo">1355</span>      WorkItemHdfsRegionInfo work = hbis.get(i);<a name="line.1355"></a>
+<span class="sourceLineNo">1356</span>      Future&lt;Void&gt; f = hbiFutures.get(i);<a name="line.1356"></a>
+<span class="sourceLineNo">1357</span>      try {<a name="line.1357"></a>
+<span class="sourceLineNo">1358</span>        f.get();<a name="line.1358"></a>
+<span class="sourceLineNo">1359</span>      } catch(ExecutionException e) {<a name="line.1359"></a>
+<span class="sourceLineNo">1360</span>        LOG.warn("Failed to read .regioninfo file for region " +<a name="line.1360"></a>
+<span class="sourceLineNo">1361</span>              work.hbi.getRegionNameAsString(), e.getCause());<a name="line.1361"></a>
+<span class="sourceLineNo">1362</span>      }<a name="line.1362"></a>
+<span class="sourceLineNo">1363</span>    }<a name="line.1363"></a>
+<span class="sourceLineNo">1364</span><a name="line.1364"></a>
+<span class="sourceLineNo">1365</span>    Path hbaseRoot = FSUtils.getRootDir(getConf());<a name="line.1365"></a>
+<span class="sourceLineNo">1366</span>    FileSystem fs = hbaseRoot.getFileSystem(getConf());<a name="line.1366"></a>
+<span class="sourceLineNo">1367</span>    // serialized table info gathering.<a name="line.1367"></a>
+<span class="sourceLineNo">1368</span>    for (HbckInfo hbi: hbckInfos) {<a name="line.1368"></a>
+<span class="sourceLineNo">1369</span><a name="line.1369"></a>
+<span class="sourceLineNo">1370</span>      if (hbi.getHdfsHRI() == null) {<a name="line.1370"></a>
+<span class="sourceLineNo">1371</span>        // was an orphan<a name="line.1371"></a>
+<span class="sourceLineNo">1372</span>        continue;<a name="line.1372"></a>
+<span class="sourceLineNo">1373</span>      }<a name="line.1373"></a>
+<span class="sourceLineNo">1374</span><a name="line.1374"></a>
 <span class="sourceLineNo">1375</span><a name="line.1375"></a>
-<span class="sourceLineNo">1376</span><a name="line.1376"></a>
-<span class="sourceLineNo">1377</span>      // get table name from hdfs, populate various HBaseFsck tables.<a name="line.1377"></a>
-<span class="sourceLineNo">1378</span>      TableName tableName = hbi.getTableName();<a name="line.1378"></a>
-<span class="sourceLineNo">1379</span>      if (tableName == null) {<a name="line.1379"></a>
-<span class="sourceLineNo">1380</span>        // There was an entry in hbase:meta not in the HDFS?<a name="line.1380"></a>
-<span class="sourceLineNo">1381</span>        LOG.warn("tableName was null for: " + hbi);<a name="line.1381"></a>
-<span class="sourceLineNo">1382</span>        continue;<a name="line.1382"></a>
-<span class="sourceLineNo">1383</span>      }<a name="line.1383"></a>
-<span class="sourceLineNo">1384</span><a name="line.1384"></a>
-<span class="sourceLineNo">1385</span>      TableInfo modTInfo = tablesInfo.get(tableName);<a name="line.1385"></a>
-<span class="sourceLineNo">1386</span>      if (modTInfo == null) {<a name="line.1386"></a>
-<span class="sourceLineNo">1387</span>        // only executed once per table.<a name="line.1387"></a>
-<span class="sourceLineNo">1388</span>        modTInfo = new TableInfo(tableName);<a name="line.1388"></a>
-<span class="sourceLineNo">1389</span>        tablesInfo.put(tableName, modTInfo);<a name="line.1389"></a>
-<span class="sourceLineNo">1390</span>        try {<a name="line.1390"></a>
-<span class="sourceLineNo">1391</span>          TableDescriptor htd =<a name="line.1391"></a>
-<span class="sourceLineNo">1392</span>              FSTableDescriptors.getTableDescriptorFromFs(fs, hbaseRoot, tableName);<a name="line.1392"></a>
-<span class="sourceLineNo">1393</span>          modTInfo.htds.add(htd);<a name="line.1393"></a>
-<span class="sourceLineNo">1394</span>        } catch (IOException ioe) {<a name="line.1394"></a>
-<span class="sourceLineNo">1395</span>          if (!orphanTableDirs.containsKey(tableName)) {<a name="line.1395"></a>
-<span class="sourceLineNo">1396</span>            LOG.warn("Unable to read .tableinfo from " + hbaseRoot, ioe);<a name="line.1396"></a>
-<span class="sourceLineNo">1397</span>            //should only report once for each table<a name="line.1397"></a>
-<span class="sourceLineNo">1398</span>            errors.reportError(ERROR_CODE.NO_TABLEINFO_FILE,<a name="line.1398"></a>
-<span class="sourceLineNo">1399</span>                "Unable to read .tableinfo from " + hbaseRoot + "/" + tableName);<a name="line.1399"></a>
-<span class="sourceLineNo">1400</span>            Set&lt;String&gt; columns = new HashSet&lt;&gt;();<a name="line.1400"></a>
-<span class="sourceLineNo">1401</span>            orphanTableDirs.put(tableName, getColumnFamilyList(columns, hbi));<a name="line.1401"></a>
-<span class="sourceLineNo">1402</span>          }<a name="line.1402"></a>
-<span class="sourceLineNo">1403</span>        }<a name="line.1403"></a>
-<span class="sourceLineNo">1404</span>      }<a name="line.1404"></a>
-<span class="sourceLineNo">1405</span>      if (!hbi.isSkipChecks()) {<a name="line.1405"></a>
-<span class="sourceLineNo">1406</span>        modTInfo.addRegionInfo(hbi);<a name="line.1406"></a>
-<span class="sourceLineNo">1407</span>      }<a name="line.1407"></a>
-<span class="sourceLineNo">1408</span>    }<a name="line.1408"></a>
-<span class="sourceLineNo">1409</span><a name="line.1409"></a>
-<span class="sourceLineNo">1410</span>    loadTableInfosForTablesWithNoRegion();<a name="line.1410"></a>
-<span class="sourceLineNo">1411</span>    errors.print("");<a name="line.1411"></a>
-<span class="sourceLineNo">1412</span><a name="line.1412"></a>
-<span class="sourceLineNo">1413</span>    return tablesInfo;<a name="line.1413"></a>
-<span class="sourceLineNo">1414</span>  }<a name="line.1414"></a>
-<span class="sourceLineNo">1415</span><a name="line.1415"></a>
-<span class="sourceLineNo">1416</span>  /**<a name="line.1416"></a>
-<span class="sourceLineNo">1417</span>   * To get the column family list according to the column family dirs<a name="line.1417"></a>
-<span class="sourceLineNo">1418</span>   * @param columns<a name="line.1418"></a>
-<span class="sourceLineNo">1419</span>   * @param hbi<a name="line.1419"></a>
-<span class="sourceLineNo">1420</span>   * @return a set of column families<a name="line.1420"></a>
-<span class="sourceLineNo">1421</span>   * @throws IOException<a name="line.1421"></a>
-<span class="sourceLineNo">1422</span>   */<a name="line.1422"></a>
-<span class="sourceLineNo">1423</span>  private Set&lt;String&gt; getColumnFamilyList(Set&lt;String&gt; columns, HbckInfo hbi) throws IOException {<a name="line.1423"></a>
-<span class="sourceLineNo">1424</span>    Path regionDir = hbi.getHdfsRegionDir();<a name="line.1424"></a>
-<span class="sourceLineNo">1425</span>    FileSystem fs = regionDir.getFileSystem(getConf());<a name="line.1425"></a>
-<span class="sourceLineNo">1426</span>    FileStatus[] subDirs = fs.listStatus(regionDir, new FSUtils.FamilyDirFilter(fs));<a name="line.1426"></a>
-<span class="sourceLineNo">1427</span>    for (FileStatus subdir : subDirs) {<a name="line.1427"></a>
-<span class="sourceLineNo">1428</span>      String columnfamily = subdir.getPath().getName();<a name="line.1428"></a>
-<span class="sourceLineNo">1429</span>      columns.add(columnfamily);<a name="line.1429"></a>
-<span class="sourceLineNo">1430</span>    }<a name="line.1430"></a>
-<span class="sourceLineNo">1431</span>    return columns;<a name="line.1431"></a>
-<span class="sourceLineNo">1432</span>  }<a name="line.1432"></a>
-<span class="sourceLineNo">1433</span><a name="line.1433"></a>
-<span class="sourceLineNo">1434</span>  /**<a name="line.1434"></a>
-<span class="sourceLineNo">1435</span>   * To fabricate a .tableinfo file with following contents&lt;br&gt;<a name="line.1435"></a>
-<span class="sourceLineNo">1436</span>   * 1. the correct tablename &lt;br&gt;<a name="line.1436"></a>
-<span class="sourceLineNo">1437</span>   * 2. the correct colfamily list&lt;br&gt;<a name="line.1437"></a>
-<span class="sourceLineNo">1438</span>   * 3. the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1438"></a>
-<span class="sourceLineNo">1439</span>   * @throws IOException<a name="line.1439"></a>
-<span class="sourceLineNo">1440</span>   */<a name="line.1440"></a>
-<span class="sourceLineNo">1441</span>  private boolean fabricateTableInfo(FSTableDescriptors fstd, TableName tableName,<a name="line.1441"></a>
-<span class="sourceLineNo">1442</span>      Set&lt;String&gt; columns) throws IOException {<a name="line.1442"></a>
-<span class="sourceLineNo">1443</span>    if (columns ==null || columns.isEmpty()) return false;<a name="line.1443"></a>
-<span class="sourceLineNo">1444</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.1444"></a>
-<span class="sourceLineNo">1445</span>    for (String columnfamimly : columns) {<a name="line.1445"></a>
-<span class="sourceLineNo">1446</span>      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(columnfamimly));<a name="line.1446"></a>
-<span class="sourceLineNo">1447</span>    }<a name="line.1447"></a>
-<span class="sourceLineNo">1448</span>    fstd.createTableDescriptor(builder.build(), true);<a name="line.1448"></a>
-<span class="sourceLineNo">1449</span>    return true;<a name="line.1449"></a>
-<span class="sourceLineNo">1450</span>  }<a name="line.1450"></a>
-<span class="sourceLineNo">1451</span><a name="line.1451"></a>
-<span class="sourceLineNo">1452</span>  /**<a name="line.1452"></a>
-<span class="sourceLineNo">1453</span>   * To fix the empty REGIONINFO_QUALIFIER rows from hbase:meta &lt;br&gt;<a name="line.1453"></a>
-<span class="sourceLineNo">1454</span>   * @throws IOException<a name="line.1454"></a>
-<span class="sourceLineNo">1455</span>   */<a name="line.1455"></a>
-<span class="sourceLineNo">1456</span>  public void fixEmptyMetaCells() throws IOException {<a name="line.1456"></a>
-<span class="sourceLineNo">1457</span>    if (shouldFixEmptyMetaCells() &amp;&amp; !emptyRegionInfoQualifiers.isEmpty()) {<a name="line.1457"></a>
-<span class="sourceLineNo">1458</span>      LOG.info("Trying to fix empty REGIONINFO_QUALIFIER hbase:meta rows.");<a name="line.1458"></a>
-<span class="sourceLineNo">1459</span>      for (Result region : emptyRegionInfoQualifiers) {<a name="line.1459"></a>
-<span class="sourceLineNo">1460</span>        deleteMetaRegion(region.getRow());<a name="line.1460"></a>
-<span class="sourceLineNo">1461</span>        errors.getErrorList().remove(ERROR_CODE.EMPTY_META_CELL);<a name="line.1461"></a>
-<span class="sourceLineNo">1462</span>      }<a name="line.1462"></a>
-<span class="sourceLineNo">1463</span>      emptyRegionInfoQualifiers.clear();<a name="line.1463"></a>
-<span class="sourceLineNo">1464</span>    }<a name="line.1464"></a>
-<span class="sourceLineNo">1465</span>  }<a name="line.1465"></a>
-<span class="sourceLineNo">1466</span><a name="line.1466"></a>
-<span class="sourceLineNo">1467</span>  /**<a name="line.1467"></a>
-<span class="sourceLineNo">1468</span>   * To fix orphan table by creating a .tableinfo file under tableDir &lt;br&gt;<a name="line.1468"></a>
-<span class="sourceLineNo">1469</span>   * 1. if TableInfo is cached, to recover the .tableinfo accordingly &lt;br&gt;<a name="line.1469"></a>
-<span class="sourceLineNo">1470</span>   * 2. else create a default .tableinfo file with following items&lt;br&gt;<a name="line.1470"></a>
-<span class="sourceLineNo">1471</span>   * &amp;nbsp;2.1 the correct tablename &lt;br&gt;<a name="line.1471"></a>
-<span class="sourceLineNo">1472</span>   * &amp;nbsp;2.2 the correct colfamily list&lt;br&gt;<a name="line.1472"></a>
-<span class="sourceLineNo">1473</span>   * &amp;nbsp;2.3 the default properties for both {@link TableDescriptor} and {@link ColumnFamilyDescriptor}&lt;br&gt;<a name="line.1473"></a>
-<span class="sourceLineNo">1474</span>   * @throws IOException<a name="line.1474"></a>
-<span class="sourceLineNo">1475</span>   */<a name="line.1475"></a>
-<span class="sourceLineNo">1476</span>  public void fixOrphanTables() throws IOException {<a name="line.1476"></a>
-<span class="sourceLineNo">1477</span>    if (shouldFixTableOrphans() &amp;&amp; !orphanTableDirs.isEmpty()) {<a name="line.1477"></a>
-<span class="sourceLineNo">1478</span><a name="line.1478"></a>
-<span class="sourceLineNo">1479</span>      List&lt;TableName&gt; tmpList = new ArrayList&lt;&gt;(orphanTableDirs.keySet().size());<a name="line.1479"></a>
-<span class="sourceLineNo">1480</span>      tmpList.addAll(orphanTableDirs.keySet());<a name="line.1480"></a>
-<span class="sourceLineNo">1481</span>      TableDescriptor[] htds = getTableDescriptors(tmpList);<a name="line.1481"></a>
-<span class="sourceLineNo">1482</span>      Iterator&lt;Entry&lt;TableName, Set&lt;String&gt;&gt;&gt; iter =<a name="line.1482"></a>
-<span class="sourceLineNo">1483</span>          orphanTableDirs.entrySet().iterator();<a name="line.1483"></a>
-<span class="sourceLineNo">1484</span>      int j = 0;<a name="line.1484"></a>
-<span class="sourceLineNo">1485</span>      int numFailedCase = 0;<a name="line.1485"></a>
-<span class="sourceLineNo">1486</span>      FSTableDescriptors fstd = new FSTableDescriptors(getConf());<a name="line.1486"></a>
-<span class="sourceLineNo">1487</span>      while (iter.hasNext()) {<a name="line.1487"></a>
-<span class="sourceLineNo">1488</span>        Entry&lt;TableName, Set&lt;String&gt;&gt; entry =<a name="line.1488"></a>
-<span class="sourceLineNo">1489</span>            iter.next();<a name="line.1489"></a>
-<span class="sourceLineNo">1490</span>        TableName tableName = entry.getKey();<a name="line.1490"></a>
-<span class="sourceLineNo">1491</span>        LOG.info("Trying to fix orphan table error: " + tableName);<a name="line.1491"></a>
-<span class="sourceLineNo">1492</span>        if (j &lt; htds.length) {<a name="line.1492"></a>
-<span class="sourceLineNo">1493</span>          if (tableName.equals(htds[j].getTableName())) {<a name="line.1493"></a>
-<span class="sourceLineNo">1494</span>            TableDescriptor htd = htds[j];<a name="line.1494"></a>
-<span class="sourceLineNo">1495</span>            LOG.info("fixing orphan table: " + tableName + " from cache");<a name="line.1495"></a>
-<span class="sourceLineNo">1496</span>            fstd.createTableDescriptor(htd, true);<a name="line.1496"></a>
-<span class="sourceLineNo">1497</span>            j++;<a name="line.1497"></a>
-<span class="sourceLineNo">1498</span>            iter.remove();<a name="line.1498"></a>
-<span class="sourceLineNo">1499</span>          }<a name="line.1499"></a>
-<span class="sourceLineNo">1500</span>        } else {<a name="line.1500"></a>
-<span class="sourceLineNo">1501</span>          if (fabricateTableInfo(fstd, tableName, entry.getValue())) {<a name="line.1501"></a>
-<span class="sourceLineNo">1502</span>            LOG.warn("fixing orphan table: " + tableName + " with a default .tableinfo file");<a name="line.1502"></a>
-<span class="sourceLineNo">1503</span>            LOG.warn("Strongly recommend to modify the TableDescriptor if necessary for: " + tableName);<a name="line.1503"></a>
-<span class="sourceLineNo">1504</span>            iter.remove();<a name="line.1504"></a>
-<span class="sourceLineNo">1505</span>          } else {<a name="line.1505"></a>
-<span class="sourceLineNo">1506</span>            LOG.error("Unable to create default .tableinfo for " + tableName + " while missing column family information");<a name="line.1506"></a>
-<span class="sourceLineNo">1507</span>            numFailedCase++;<a name="line.1507"></a>
-<span class="sourceLineNo">1508</span>          }<a name="line.1508"></a>
-<span class="sourceLineNo">1509</span>        }<a name="line.1509"></a>
-<span class="sourceLineNo">1510</span>        fixes++;<a name="line.1510"></a>
-<span class="sourceLineNo">1511</span>      }<a name="line.1511"></a>
-<span class="sourceLineNo">1512</span><a name="line.1512"></a>
-<span class="sourceLineNo">1513</span>      if (orphanTableDirs.isEmpty()) {<a name="line.1513"></a>
-<span class="sourceLineNo">1514</span>        // all orphanTableDirs are luckily recovered<a name="line.1514"></a>
-<span class="sourceLineNo">1515</span>        // re-run doFsck after recovering the .tableinfo file<a name="line.1515"></a>
-<span class="sourceLineNo">1516</span>        setShouldRerun();<a name="line.1516"></a>
-<span class="sourceLineNo">1517</span>        LOG.warn("Strongly recommend to re-run manually hfsck after all orphanTableDirs being fixed");<a name="line.1517"></a>
-<span class="sourceLineNo">1518</span>      } else if (numFailedCase &gt; 0) {<a name="line.1518"></a>
-<span class="sourceLineNo">1519</span>        LOG.error("Failed to fix " + numFailedCase<a name="line.1519"></a>
-<span class="sourceLineNo">1520</span>            + " OrphanTables with default .tableinfo files");<a name="line.1520"></a>
-<span class="sourceLineNo">1521</span>      }<a name="line.1521"></a>
-<span class="sourceLineNo">1522</span><a name="line.1522"></a>
-<span class="sourceLineNo">1523</span>    }<a name="line.1523"></a>
-<span class="sourceLineNo">1524</span>    //cleanup the list<a name="line.1524"></a>
-<span class="sourceLineNo">1525</span>    orphanTableDirs.clear();<a name="line.1525"></a>
-<span class="sourceLineNo">1526</span><a name="line.1526"></a>
-<span class="sourceLineNo">1527</span>  }<a name="line.1527"></a>
-<span class="sourceLineNo">1528</span><a name="line.1528"></a>
-<span class="sourceLineNo">1529</span>  /**<a name="line.1529"></a>
-<span class="sourceLineNo">1530</span>   * This borrows code from MasterFileSystem.bootstrap(). Explicitly creates it's own WAL, so be<a name="line.1530"></a>
-<span class="sourceLineNo">1531</span>   * sure to close it as well as the region when you're finished.<a name="line.1531"></a>
-<span class="sourceLineNo">1532</span>   * @param walFactoryID A unique identifier for WAL factory. Filesystem implementations will use<a name="line.1532"></a>
-<span class="sourceLineNo">1533</span>   *          this ID to make a directory inside WAL directory path.<a name="line.1533"></a>
-<span class="sourceLineNo">1534</span>   * @return an open hbase:meta HRegion<a name="line.1534"></a>
-<span class="sourceLineNo">1535</span>   */<a name="line.1535"></a>
-<span class="sourceLineNo">1536</span>  private HRegion createNewMeta(String walFactoryID) throws IOException {<a name="line.1536"></a>
-<span class="sourceLineNo">1537</span>    Path rootdir = FSUtils.getRootDir(getConf());<a name="line.1537"></a>
-<span class="sourceLineNo">1538</span>    Configuration c = getConf();<a name="line.1538"></a>
-<span class="sourceLineNo">1539</span>    RegionInfo metaHRI = RegionInfoBuilder.FIRST_META_REGIONINFO;<a name="line.1539"></a>
-<span class="sourceLineNo">1540</span>    TableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);<a name="line.1540"></a>
-<span class="sourceLineNo">1541</span>    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);<a name="line.1541"></a>
-<span class="sourceLineNo">1542</span>    // The WAL subsystem will use the default rootDir rather than the passed in rootDir<a name="line.1542"></a>
-<span class="sourceLineNo">1543</span>    // unless I pass along via the 

<TRUNCATED>