Posted to commits@hbase.apache.org by te...@apache.org on 2014/12/04 21:19:06 UTC

hbase git commit: HBASE-12204 Backport HBASE-12016 'Reduce number of versions in Meta table. Make it configurable' to branch-1

Repository: hbase
Updated Branches:
  refs/heads/branch-1 ffbfe01bb -> 2e47c37b7


HBASE-12204 Backport HBASE-12016 'Reduce number of versions in Meta table. Make it configurable' to branch-1


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2e47c37b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2e47c37b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2e47c37b

Branch: refs/heads/branch-1
Commit: 2e47c37b7102aa88ac99bacb8950105178ec0e29
Parents: ffbfe01
Author: tedyu <yu...@gmail.com>
Authored: Thu Dec 4 12:18:58 2014 -0800
Committer: tedyu <yu...@gmail.com>
Committed: Thu Dec 4 12:18:58 2014 -0800

----------------------------------------------------------------------
 .../apache/hadoop/hbase/HTableDescriptor.java   | 34 ++++++++++++++++++--
 .../hadoop/hbase/client/ConnectionManager.java  |  3 --
 .../org/apache/hadoop/hbase/HConstants.java     | 20 ++++++++++++
 .../hadoop/hbase/master/MasterFileSystem.java   | 22 +++++++------
 .../hadoop/hbase/regionserver/HRegion.java      |  4 ++-
 .../hbase/regionserver/HRegionServer.java       |  2 +-
 .../hbase/security/access/AccessController.java | 11 ++-----
 .../hadoop/hbase/snapshot/SnapshotManifest.java |  2 +-
 .../hadoop/hbase/util/FSTableDescriptors.java   | 18 +++++++----
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |  8 ++---
 .../org/apache/hadoop/hbase/util/MetaUtils.java |  6 ++--
 .../org/apache/hadoop/hbase/HBaseTestCase.java  | 14 ++++++--
 .../hadoop/hbase/HBaseTestingUtility.java       | 12 +++++++
 .../TestFSTableDescriptorForceCreation.java     |  6 ++--
 .../apache/hadoop/hbase/client/TestAdmin2.java  |  2 +-
 .../hadoop/hbase/master/TestMasterFailover.java |  6 ++--
 .../hbase/migration/TestNamespaceUpgrade.java   |  2 +-
 .../regionserver/TestGetClosestAtOrBefore.java  |  5 ++-
 .../hbase/regionserver/TestHRegionInfo.java     |  8 +++--
 .../hbase/util/TestFSTableDescriptors.java      | 21 ++++++------
 .../apache/hadoop/hbase/util/TestHBaseFsck.java |  4 +--
 .../hadoop/hbase/util/TestMergeTable.java       |  4 +--
 .../apache/hadoop/hbase/util/TestMergeTool.java |  2 +-
 23 files changed, 146 insertions(+), 70 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 95f1771..d16e8ba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -39,6 +39,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -1376,15 +1377,20 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
               new Path(name.getNamespaceAsString(), new Path(name.getQualifierAsString()))));
   }
 
-  /** Table descriptor for <code>hbase:meta</code> catalog table */
+  /**
+   * Table descriptor for <code>hbase:meta</code> catalog table
+   * @deprecated Use TableDescriptors#get(TableName.META_TABLE_NAME) or
+   * HBaseAdmin#getTableDescriptor(TableName.META_TABLE_NAME) instead.
+   */
+  @Deprecated
   public static final HTableDescriptor META_TABLEDESC = new HTableDescriptor(
       TableName.META_TABLE_NAME,
       new HColumnDescriptor[] {
           new HColumnDescriptor(HConstants.CATALOG_FAMILY)
               // Ten is arbitrary number.  Keep versions to help debugging.
-              .setMaxVersions(10)
+              .setMaxVersions(HConstants.DEFAULT_HBASE_META_VERSIONS)
               .setInMemory(true)
-              .setBlocksize(8 * 1024)
+              .setBlocksize(HConstants.DEFAULT_HBASE_META_BLOCK_SIZE)
               .setScope(HConstants.REPLICATION_SCOPE_LOCAL)
               // Disable blooms for meta.  Needs work.  Seems to mess w/ getClosestOrBefore.
               .setBloomFilterType(BloomType.NONE)
@@ -1563,4 +1569,26 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
   public void removeConfiguration(final String key) {
     configuration.remove(key);
   }
+
+  public static HTableDescriptor metaTableDescriptor(final Configuration conf)
+      throws IOException {
+    HTableDescriptor metaDescriptor = new HTableDescriptor(
+      TableName.META_TABLE_NAME,
+      new HColumnDescriptor[] {
+        new HColumnDescriptor(HConstants.CATALOG_FAMILY)
+          .setMaxVersions(conf.getInt(HConstants.HBASE_META_VERSIONS,
+            HConstants.DEFAULT_HBASE_META_VERSIONS))
+          .setInMemory(true)
+          .setBlocksize(conf.getInt(HConstants.HBASE_META_BLOCK_SIZE,
+            HConstants.DEFAULT_HBASE_META_BLOCK_SIZE))
+          .setScope(HConstants.REPLICATION_SCOPE_LOCAL)
+          // Disable blooms for meta.  Needs work.  Seems to mess w/ getClosestOrBefore.
+          .setBloomFilterType(BloomType.NONE)
+         });
+    metaDescriptor.addCoprocessor(
+      "org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint",
+      null, Coprocessor.PRIORITY_SYSTEM, null);
+    return metaDescriptor;
+  }
+
 }
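
The deprecation note above steers callers toward TableDescriptors#get(TableName.META_TABLE_NAME) or HBaseAdmin#getTableDescriptor, while the new metaTableDescriptor(Configuration) factory builds the same descriptor from configuration rather than from the static constant. A minimal sketch of calling it (not part of the patch; the wrapper class and main method are illustrative only):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;

public class MetaDescriptorExample {
  public static void main(String[] args) throws IOException {
    // Reads hbase.meta.versions and hbase.meta.blocksize from the
    // configuration, falling back to the defaults in HConstants.
    Configuration conf = HBaseConfiguration.create();
    HTableDescriptor meta = HTableDescriptor.metaTableDescriptor(conf);
    System.out.println(meta);
  }
}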

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
index b34d415..1026d86 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
@@ -2456,9 +2456,6 @@ class ConnectionManager {
     public HTableDescriptor getHTableDescriptor(final TableName tableName)
     throws IOException {
       if (tableName == null) return null;
-      if (tableName.equals(TableName.META_TABLE_NAME)) {
-        return HTableDescriptor.META_TABLEDESC;
-      }
       MasterKeepAliveConnection master = getKeepAliveMasterService();
       GetTableDescriptorsResponse htds;
       try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 454f346..7700812 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -684,6 +684,26 @@ public final class HConstants {
   public static final int DEFAULT_HBASE_CLIENT_SCANNER_CACHING = 100;
 
   /**
+   * Parameter name for the number of versions kept by the meta table.
+   */
+  public static String HBASE_META_VERSIONS = "hbase.meta.versions";
+
+  /**
+   * Default value of {@link #HBASE_META_VERSIONS}.
+   */
+  public static int DEFAULT_HBASE_META_VERSIONS = 10;
+
+  /**
+   * Parameter name for the block size of the meta table.
+   */
+  public static String HBASE_META_BLOCK_SIZE = "hbase.meta.blocksize";
+
+  /**
+   * Default value of {@link #HBASE_META_BLOCK_SIZE}.
+   */
+  public static int DEFAULT_HBASE_META_BLOCK_SIZE = 8 * 1024;
+
+  /**
    * Parameter name for number of rows that will be fetched when calling next on
    * a scanner if it is not served from memory. Higher caching values will
    * enable faster scanners but will eat up more memory and some calls of next
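
Both new keys default to the values that were previously hard-coded for hbase:meta (10 versions, an 8 KB block size). A minimal sketch of overriding and reading them through the standard Hadoop Configuration API (not part of the patch; the wrapper class and chosen values are illustrative only):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

public class MetaConfigExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Equivalent to setting hbase.meta.versions / hbase.meta.blocksize
    // in hbase-site.xml before the meta descriptor is created.
    conf.setInt(HConstants.HBASE_META_VERSIONS, 3);
    conf.setInt(HConstants.HBASE_META_BLOCK_SIZE, 16 * 1024);
    // Read back with the same defaults the patch uses.
    int versions = conf.getInt(HConstants.HBASE_META_VERSIONS,
        HConstants.DEFAULT_HBASE_META_VERSIONS);
    int blockSize = conf.getInt(HConstants.HBASE_META_BLOCK_SIZE,
        HConstants.DEFAULT_HBASE_META_BLOCK_SIZE);
    System.out.println("meta versions=" + versions
        + ", block size=" + blockSize);
  }
}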

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index 2532876..7650b94 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -459,7 +459,12 @@ public class MasterFileSystem {
     }
 
     // Create tableinfo-s for hbase:meta if not already there.
-    new FSTableDescriptors(fs, rd).createTableDescriptor(HTableDescriptor.META_TABLEDESC);
+
+    // hbase:meta is a system table, so its descriptor is predefined;
+    // get it from the FSTableDescriptors registry.
+    FSTableDescriptors fsd = new FSTableDescriptors(c, fs, rd);
+    fsd.createTableDescriptor(
+      new HTableDescriptor(fsd.get(TableName.META_TABLE_NAME)));
 
     return rd;
   }
@@ -499,10 +504,10 @@ public class MasterFileSystem {
       // not make it in first place.  Turn off block caching for bootstrap.
       // Enable after.
       HRegionInfo metaHRI = new HRegionInfo(HRegionInfo.FIRST_META_REGIONINFO);
-      setInfoFamilyCachingForMeta(false);
-      HRegion meta = HRegion.createHRegion(metaHRI, rd, c,
-          HTableDescriptor.META_TABLEDESC, null, true, true);
-      setInfoFamilyCachingForMeta(true);
+      HTableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);
+      setInfoFamilyCachingForMeta(metaDescriptor, false);
+      HRegion meta = HRegion.createHRegion(metaHRI, rd, c, metaDescriptor);
+      setInfoFamilyCachingForMeta(metaDescriptor, true);
       HRegion.closeHRegion(meta);
     } catch (IOException e) {
       e = RemoteExceptionHandler.checkIOException(e);
@@ -514,9 +519,9 @@ public class MasterFileSystem {
   /**
    * Enable in memory caching for hbase:meta
    */
-  public static void setInfoFamilyCachingForMeta(final boolean b) {
-    for (HColumnDescriptor hcd:
-        HTableDescriptor.META_TABLEDESC.getColumnFamilies()) {
+  public static void setInfoFamilyCachingForMeta(final HTableDescriptor metaDescriptor,
+      final boolean b) {
+    for (HColumnDescriptor hcd: metaDescriptor.getColumnFamilies()) {
       if (Bytes.equals(hcd.getName(), HConstants.CATALOG_FAMILY)) {
         hcd.setBlockCacheEnabled(b);
         hcd.setInMemory(b);
@@ -524,7 +529,6 @@ public class MasterFileSystem {
     }
   }
 
-
   public void deleteRegion(HRegionInfo region) throws IOException {
     HFileArchiver.archiveRegion(conf, fs, region);
   }
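
With this change, setInfoFamilyCachingForMeta no longer mutates the shared META_TABLEDESC singleton; callers pass in the meta descriptor they obtained from the registry. A minimal sketch of the bootstrap-style call pattern used above (not part of the patch; the wrapper class is illustrative only):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.util.FSTableDescriptors;

public class MetaCachingToggleExample {
  public static void toggleForBootstrap(Configuration conf) throws IOException {
    HTableDescriptor metaDescriptor =
        new FSTableDescriptors(conf).get(TableName.META_TABLE_NAME);
    // Disable info-family caching while the meta region is being created ...
    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);
    // ... create the meta region here ...
    // ... then re-enable it afterwards.
    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, true);
  }
}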

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 9874683..5618f32 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -146,6 +146,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.Counter;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HashedBytes;
 import org.apache.hadoop.hbase.util.Pair;
@@ -5956,12 +5957,13 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver { //
       final boolean majorCompact)
   throws IOException {
     HRegion region;
+    FSTableDescriptors fst = new FSTableDescriptors(c);
     // Currently expects tables have one region only.
     if (FSUtils.getTableName(p).equals(TableName.META_TABLE_NAME)) {
       final WAL wal = walFactory.getMetaWAL(
           HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes());
       region = HRegion.newHRegion(p, wal, fs, c,
-        HRegionInfo.FIRST_META_REGIONINFO, HTableDescriptor.META_TABLEDESC, null);
+        HRegionInfo.FIRST_META_REGIONINFO, fst.get(TableName.META_TABLE_NAME), null);
     } else {
       throw new IOException("Not a known catalog table: " + p.toString());
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index c3e8650..30cadee 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -515,7 +515,7 @@ public class HRegionServer extends HasThread implements
     this.fs = new HFileSystem(this.conf, useHBaseChecksum);
     this.rootDir = FSUtils.getRootDir(this.conf);
     this.tableDescriptors = new FSTableDescriptors(
-      this.fs, this.rootDir, !canUpdateTableDescriptor(), false);
+      this.conf, this.fs, this.rootDir, !canUpdateTableDescriptor(), false);
 
     service = new ExecutorService(getServerName().toShortString());
     spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index e106d7a..d444747 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -49,8 +49,6 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotDisabledException;
-import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagRewriteCell;
 import org.apache.hadoop.hbase.client.Append;
@@ -2241,14 +2239,9 @@ public class AccessController extends BaseMasterAndRegionObserver
     else {
       MasterServices masterServices = ctx.getEnvironment().getMasterServices();
       for (TableName tableName: tableNamesList) {
-        // Do not deny if the table does not exist
-        try {
-          masterServices.checkTableModifiable(tableName);
-        } catch (TableNotFoundException ex) {
-          // Skip checks for a table that does not exist
+        // Skip checks for a table that does not exist
+        if (masterServices.getTableDescriptors().get(tableName) == null) {
           continue;
-        } catch (TableNotDisabledException ex) {
-          // We don't care about this
         }
         requirePermission("getTableDescriptors", tableName, null, null,
           Action.ADMIN, Action.CREATE);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 4e0b75f..38ccf08 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -352,7 +352,7 @@ public class SnapshotManifest {
       Path rootDir = FSUtils.getRootDir(conf);
       LOG.info("Using old Snapshot Format");
       // write a copy of descriptor to the snapshot directory
-      new FSTableDescriptors(fs, rootDir)
+      new FSTableDescriptors(conf, fs, rootDir)
         .createTableDescriptorForTableDirectory(workingDir, htd, false);
     } else {
       LOG.debug("Convert to Single Snapshot Manifest");

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index 30ebc7d..7cd2673 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -92,30 +92,36 @@ public class FSTableDescriptors implements TableDescriptors {
     new ConcurrentHashMap<TableName, HTableDescriptor>();
 
   /**
+   * Table descriptor for <code>hbase:meta</code> catalog table
+   */
+  private final HTableDescriptor metaTableDescriptor;
+
+  /**
    * Construct a FSTableDescriptors instance using the hbase root dir of the given
    * conf and the filesystem where that root dir lives.
    * This instance can do write operations (is not read only).
    */
   public FSTableDescriptors(final Configuration conf) throws IOException {
-    this(FSUtils.getCurrentFileSystem(conf), FSUtils.getRootDir(conf));
+    this(conf, FSUtils.getCurrentFileSystem(conf), FSUtils.getRootDir(conf));
   }
 
-  public FSTableDescriptors(final FileSystem fs, final Path rootdir)
-  throws IOException {
-    this(fs, rootdir, false, true);
+  public FSTableDescriptors(final Configuration conf, final FileSystem fs, final Path rootdir)
+      throws IOException {
+    this(conf, fs, rootdir, false, true);
   }
 
   /**
    * @param fsreadonly True if we are read-only when it comes to filesystem
    * operations; i.e. on remove, we do not do delete in fs.
    */
-  public FSTableDescriptors(final FileSystem fs,
+  public FSTableDescriptors(final Configuration conf, final FileSystem fs,
     final Path rootdir, final boolean fsreadonly, final boolean usecache) throws IOException {
     super();
     this.fs = fs;
     this.rootdir = rootdir;
     this.fsreadonly = fsreadonly;
     this.usecache = usecache;
+    this.metaTableDescriptor = HTableDescriptor.metaTableDescriptor(conf);
   }
 
   public void setCacheOn() throws IOException {
@@ -145,7 +151,7 @@ public class FSTableDescriptors implements TableDescriptors {
     invocations++;
     if (TableName.META_TABLE_NAME.equals(tablename)) {
       cachehits++;
-      return HTableDescriptor.META_TABLEDESC;
+      return metaTableDescriptor;
     }
     // hbase:meta is already handled. If some one tries to get the descriptor for
     // .logs, .oldlogs or .corrupt throw an exception.
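
The Configuration is now threaded through every FSTableDescriptors constructor, so the hbase:meta descriptor is built per instance instead of served from the static META_TABLEDESC. A minimal sketch of the registry lookup the deprecation note recommends (not part of the patch; the wrapper class is illustrative only):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;

public class MetaFromRegistryExample {
  public static HTableDescriptor lookupMeta(Configuration conf) throws IOException {
    FileSystem fs = FSUtils.getCurrentFileSystem(conf);
    Path rootdir = FSUtils.getRootDir(conf);
    // The three-argument constructor added by this patch: hbase:meta is
    // answered from the descriptor built from conf, other tables from fs.
    FSTableDescriptors fstd = new FSTableDescriptors(conf, fs, rootdir);
    return fstd.get(TableName.META_TABLE_NAME);
  }
}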

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index d0c749a..a0fbda9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -1181,10 +1181,10 @@ public class HBaseFsck extends Configured {
       Path rootdir = FSUtils.getRootDir(getConf());
     Configuration c = getConf();
     HRegionInfo metaHRI = new HRegionInfo(HRegionInfo.FIRST_META_REGIONINFO);
-    MasterFileSystem.setInfoFamilyCachingForMeta(false);
-    HRegion meta = HRegion.createHRegion(metaHRI, rootdir, c,
-        HTableDescriptor.META_TABLEDESC);
-    MasterFileSystem.setInfoFamilyCachingForMeta(true);
+    HTableDescriptor metaDescriptor = new FSTableDescriptors(c).get(TableName.META_TABLE_NAME);
+    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, false);
+    HRegion meta = HRegion.createHRegion(metaHRI, rootdir, c, metaDescriptor);
+    MasterFileSystem.setInfoFamilyCachingForMeta(metaDescriptor, true);
     return meta;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java
index 7f9e019..1ff4967 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALFactory;
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.wal.WALFactory;
 public class MetaUtils {
   private static final Log LOG = LogFactory.getLog(MetaUtils.class);
   private final Configuration conf;
+  private final FSTableDescriptors descriptors;
   private FileSystem fs;
   private WALFactory walFactory;
   private HRegion metaRegion;
@@ -69,6 +70,7 @@ public class MetaUtils {
     this.conf = conf;
     conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
     this.metaRegion = null;
+    this.descriptors = new FSTableDescriptors(conf);
     initialize();
   }
 
@@ -146,7 +148,7 @@ public class MetaUtils {
       return this.metaRegion;
     }
     this.metaRegion = HRegion.openHRegion(HRegionInfo.FIRST_META_REGIONINFO,
-      HTableDescriptor.META_TABLEDESC, getLog(HRegionInfo.FIRST_META_REGIONINFO),
+      descriptors.get(TableName.META_TABLE_NAME), getLog(HRegionInfo.FIRST_META_REGIONINFO),
       this.conf);
     this.metaRegion.compactStores();
     return this.metaRegion;

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 8dba04c..5b58bbe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 
@@ -74,6 +75,14 @@ public abstract class HBaseTestCase extends TestCase {
   protected final HBaseTestingUtility testUtil = new HBaseTestingUtility();
 
   public volatile Configuration conf = HBaseConfiguration.create();
+  public final FSTableDescriptors fsTableDescriptors;
+  {
+    try {
+      fsTableDescriptors = new FSTableDescriptors(conf);
+    } catch (IOException e) {
+      throw new RuntimeException("Failed to init descriptors", e);
+    }
+  }
 
   /** constructor */
   public HBaseTestCase() {
@@ -630,8 +639,9 @@ public abstract class HBaseTestCase extends TestCase {
    * @throws IOException
    */
   protected void createMetaRegion() throws IOException {
-    meta = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, testDir,
-        conf, HTableDescriptor.META_TABLEDESC);
+    FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(conf);
+    meta = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, testDir, conf,
+      fsTableDescriptors.get(TableName.META_TABLE_NAME));
   }
 
   protected void closeRootAndMeta() throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 6e48466..579caab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -93,6 +93,7 @@ import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.tool.Canary;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
@@ -376,6 +377,17 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   }
 
   /**
+   * @return META table descriptor
+   */
+  public HTableDescriptor getMetaTableDescriptor() {
+    try {
+      return new FSTableDescriptors(conf).get(TableName.META_TABLE_NAME);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to create META table descriptor", e);
+    }
+  }
+
+  /**
    * @return Where the DFS cluster will write data on the local subsystem.
    * Creates it if it does not exist already.  A subdir of {@link #getBaseTestDir()}
    * @see #getTestFileSystem()

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
index b4085ca..906dfee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
@@ -39,7 +39,7 @@ public class TestFSTableDescriptorForceCreation {
     final String name = "newTable2";
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, rootdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
 
     assertTrue("Should create new table descriptor", fstd.createTableDescriptor(htd, false));
@@ -52,7 +52,7 @@ public class TestFSTableDescriptorForceCreation {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     // Cleanup old tests if any detritus laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, rootdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
     HTableDescriptor htd = new HTableDescriptor(name);
     fstd.add(htd);
     assertFalse("Should not create new table descriptor", fstd.createTableDescriptor(htd, false));
@@ -64,7 +64,7 @@ public class TestFSTableDescriptorForceCreation {
     final String name = "createNewTableNew2";
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, rootdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
     fstd.createTableDescriptor(htd, false);
     assertTrue("Should create new table descriptor",

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 1f4d865..1ccad92 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -111,7 +111,7 @@ public class TestAdmin2 {
   public void testCreateBadTables() throws IOException {
     String msg = null;
     try {
-      this.admin.createTable(HTableDescriptor.META_TABLEDESC);
+      this.admin.createTable(new HTableDescriptor(TableName.META_TABLE_NAME));
     } catch(TableExistsException e) {
       msg = e.toString();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
index 84a3e07..6ddd9b0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
@@ -206,7 +206,7 @@ public class TestMasterFailover {
 
     FileSystem filesystem = FileSystem.get(conf);
     Path rootdir = FSUtils.getRootDir(conf);
-    FSTableDescriptors fstd = new FSTableDescriptors(filesystem, rootdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(conf, filesystem, rootdir);
     // Write the .tableinfo
     fstd.createTableDescriptor(htdEnabled);
 
@@ -553,7 +553,7 @@ public class TestMasterFailover {
     htdEnabled.addFamily(new HColumnDescriptor(FAMILY));
     FileSystem filesystem = FileSystem.get(conf);
     Path rootdir = FSUtils.getRootDir(conf);
-    FSTableDescriptors fstd = new FSTableDescriptors(filesystem, rootdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(conf, filesystem, rootdir);
     // Write the .tableinfo
     fstd.createTableDescriptor(htdEnabled);
     HRegionInfo hriEnabled = new HRegionInfo(htdEnabled.getTableName(),
@@ -1214,7 +1214,7 @@ public class TestMasterFailover {
 
     FileSystem filesystem = FileSystem.get(conf);
     Path rootdir = FSUtils.getRootDir(conf);
-    FSTableDescriptors fstd = new FSTableDescriptors(filesystem, rootdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(conf, filesystem, rootdir);
     fstd.createTableDescriptor(offlineTable);
 
     HRegionInfo hriOffline = new HRegionInfo(offlineTable.getTableName(), null, null);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java
index d030b72..2be15be 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestNamespaceUpgrade.java
@@ -305,7 +305,7 @@ public class TestNamespaceUpgrade {
     // Create a Region
     HTableDescriptor aclTable = new HTableDescriptor(TableName.valueOf("testACLTable"));
     aclTable.addFamily(new HColumnDescriptor(FAMILY));
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, rootDir);
+    FSTableDescriptors fstd = new FSTableDescriptors(conf, fs, rootDir);
     fstd.createTableDescriptor(aclTable);
     HRegionInfo hriAcl = new HRegionInfo(aclTable.getTableName(), null, null);
     HRegion region = HRegion.createHRegion(hriAcl, rootDir, conf, aclTable);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
index b2eaa87..c00f68f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
@@ -68,10 +68,9 @@ public class TestGetClosestAtOrBefore extends HBaseTestCase {
     FileSystem filesystem = FileSystem.get(conf);
     Path rootdir = testDir;
     // Up flush size else we bind up when we use default catalog flush of 16k.
-    HTableDescriptor.META_TABLEDESC.setMemStoreFlushSize(64 * 1024 * 1024);
-
+    fsTableDescriptors.get(TableName.META_TABLE_NAME).setMemStoreFlushSize(64 * 1024 * 1024);
     HRegion mr = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO,
-      rootdir, this.conf, HTableDescriptor.META_TABLEDESC);
+      rootdir, this.conf, fsTableDescriptors.get(TableName.META_TABLE_NAME));
     try {
     // Write rows for three tables 'A', 'B', and 'C'.
     for (char c = 'A'; c < 'D'; c++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
index db4e5cf..90adac8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.MD5Hash;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -58,15 +59,16 @@ public class TestHRegionInfo {
     HBaseTestingUtility htu = new HBaseTestingUtility();
     HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO;
     Path basedir = htu.getDataTestDir();
+    FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(htu.getConfiguration());
     // Create a region.  That'll write the .regioninfo file.
     HRegion r = HRegion.createHRegion(hri, basedir, htu.getConfiguration(),
-      HTableDescriptor.META_TABLEDESC);
+      fsTableDescriptors.get(TableName.META_TABLE_NAME));
     // Get modtime on the file.
     long modtime = getModTime(r);
     HRegion.closeHRegion(r);
     Thread.sleep(1001);
-    r = HRegion.openHRegion(basedir, hri, HTableDescriptor.META_TABLEDESC,
-        null, htu.getConfiguration());
+    r = HRegion.openHRegion(basedir, hri, fsTableDescriptors.get(TableName.META_TABLE_NAME),
+      null, htu.getConfiguration());
     // Ensure the file is not written for a second time.
     long modtime2 = getModTime(r);
     assertEquals(modtime, modtime2);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
index 9e11413..6235f3e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
@@ -72,7 +72,7 @@ public class TestFSTableDescriptors {
     Path testdir = UTIL.getDataTestDir("testCreateAndUpdate");
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testCreate"));
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, testdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, testdir);
     assertTrue(fstd.createTableDescriptor(htd));
     assertFalse(fstd.createTableDescriptor(htd));
     FileStatus[] statuses = fs.listStatus(testdir);
@@ -93,7 +93,7 @@ public class TestFSTableDescriptors {
     HTableDescriptor htd = new HTableDescriptor(
         TableName.valueOf("testSequenceidAdvancesOnTableInfo"));
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, testdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, testdir);
     Path p0 = fstd.updateTableDescriptor(htd);
     int i0 = FSTableDescriptors.getTableInfoSequenceId(p0);
     Path p1 = fstd.updateTableDescriptor(htd);
@@ -145,7 +145,7 @@ public class TestFSTableDescriptors {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     // Cleanup old tests if any detrius laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
-    TableDescriptors htds = new FSTableDescriptors(fs, rootdir);
+    TableDescriptors htds = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
     htds.add(htd);
     assertNotNull(htds.remove(htd.getTableName()));
@@ -159,7 +159,7 @@ public class TestFSTableDescriptors {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
     Path rootdir = UTIL.getDataTestDir(name);
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, rootdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
     fstd.createTableDescriptor(htd);
     HTableDescriptor htd2 =
       FSTableDescriptors.getTableDescriptorFromFs(fs, rootdir, htd.getTableName());
@@ -213,7 +213,7 @@ public class TestFSTableDescriptors {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     // Cleanup old tests if any debris laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
-    FSTableDescriptors htds = new FSTableDescriptors(fs, rootdir,
+    FSTableDescriptors htds = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir,
       false, false);
     final int count = 10;
     // Write out table infos.
@@ -315,7 +315,7 @@ public class TestFSTableDescriptors {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     // Cleanup old tests if any detrius laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
-    TableDescriptors htds = new FSTableDescriptors(fs, rootdir);
+    TableDescriptors htds = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
     assertNull("There shouldn't be any HTD for this table", htds.get(TableName.valueOf("NoSuchTable")));
   }
 
@@ -325,7 +325,7 @@ public class TestFSTableDescriptors {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     // Cleanup old tests if any detrius laying around.
     Path rootdir = new Path(UTIL.getDataTestDir(), name);
-    TableDescriptors htds = new FSTableDescriptors(fs, rootdir);
+    TableDescriptors htds = new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir);
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
     htds.add(htd);
     htds.add(htd);
@@ -363,7 +363,8 @@ public class TestFSTableDescriptors {
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
     try {
       // .tmp dir is an invalid table name
-      new FSTableDescriptors(fs, FSUtils.getRootDir(UTIL.getConfiguration()))
+      new FSTableDescriptors(UTIL.getConfiguration(), fs,
+        FSUtils.getRootDir(UTIL.getConfiguration()))
           .get(TableName.valueOf(HConstants.HBASE_TEMP_DIRECTORY));
       fail("Shouldn't be able to read a table descriptor for the archive directory.");
     } catch (Exception e) {
@@ -378,7 +379,7 @@ public class TestFSTableDescriptors {
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(
         "testCreateTableDescriptorUpdatesIfThereExistsAlready"));
     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
-    FSTableDescriptors fstd = new FSTableDescriptors(fs, testdir);
+    FSTableDescriptors fstd = new FSTableDescriptors(UTIL.getConfiguration(), fs, testdir);
     assertTrue(fstd.createTableDescriptor(htd));
     assertFalse(fstd.createTableDescriptor(htd));
     htd.setValue(Bytes.toBytes("mykey"), Bytes.toBytes("myValue"));
@@ -401,7 +402,7 @@ public class TestFSTableDescriptors {
 
     public FSTableDescriptorsTest(FileSystem fs, Path rootdir, boolean fsreadonly, boolean usecache)
     throws IOException {
-      super(fs, rootdir, fsreadonly, usecache);
+      super(UTIL.getConfiguration(), fs, rootdir, fsreadonly, usecache);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
index 4f6e3ab..6477741 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
@@ -2267,7 +2267,7 @@ public class TestHBaseFsck {
       LOG.info("deleting hdfs .regioninfo data: " + hri.toString() + hsa.toString());
       Path rootDir = FSUtils.getRootDir(conf);
       FileSystem fs = rootDir.getFileSystem(conf);
-      Path p = new Path(rootDir + "/" + HTableDescriptor.META_TABLEDESC.getNameAsString(),
+      Path p = new Path(rootDir + "/" + TableName.META_TABLE_NAME.getNameAsString(),
           hri.getEncodedName());
       Path hriPath = new Path(p, HRegionFileSystem.REGION_INFO_FILE);
       fs.delete(hriPath, true);
@@ -2277,7 +2277,7 @@ public class TestHBaseFsck {
       LOG.info("deleting hdfs data: " + hri.toString() + hsa.toString());
       Path rootDir = FSUtils.getRootDir(conf);
       FileSystem fs = rootDir.getFileSystem(conf);
-      Path p = new Path(rootDir + "/" + HTableDescriptor.META_TABLEDESC.getNameAsString(),
+      Path p = new Path(rootDir + "/" + TableName.META_TABLE_NAME.getNameAsString(),
           hri.getEncodedName());
       HBaseFsck.debugLsr(conf, p);
       boolean success = fs.delete(p, true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
index e3910db..f6edde4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
@@ -96,7 +96,7 @@ public class TestMergeTable {
 
     // Create regions and populate them at same time.  Create the tabledir
     // for them first.
-    new FSTableDescriptors(fs, rootdir).createTableDescriptor(desc);
+    new FSTableDescriptors(UTIL.getConfiguration(), fs, rootdir).createTableDescriptor(desc);
     HRegion [] regions = {
       createRegion(desc, null, row_70001, 1, 70000, rootdir),
       createRegion(desc, row_70001, row_80001, 70001, 10000, rootdir),
@@ -161,7 +161,7 @@ public class TestMergeTable {
   throws IOException {
     HRegion meta =
       HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, rootdir,
-      UTIL.getConfiguration(), HTableDescriptor.META_TABLEDESC);
+      UTIL.getConfiguration(), UTIL.getMetaTableDescriptor());
     for (HRegion r: regions) {
       HRegion.addRegionToMETA(meta, r);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2e47c37b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index 5ab0d43..fb63276 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -148,7 +148,7 @@ public class TestMergeTool extends HBaseTestCase {
     try {
       // Create meta region
       createMetaRegion();
-      new FSTableDescriptors(this.fs, this.testDir).createTableDescriptor(this.desc);
+      new FSTableDescriptors(conf, this.fs, this.testDir).createTableDescriptor(this.desc);
       /*
        * Create the regions we will merge
        */