Posted to commits@hbase.apache.org by te...@apache.org on 2016/11/01 22:40:50 UTC

hbase git commit: HBASE-16978 Disable backup by default (Vladimir Rodionov)

Repository: hbase
Updated Branches:
  refs/heads/HBASE-7912 3d2256428 -> cd41083dc


HBASE-16978 Disable backup by default (Vladimir Rodionov)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cd41083d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cd41083d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cd41083d

Branch: refs/heads/HBASE-7912
Commit: cd41083dc47f1ebfa45e15f3786e1970c30b331d
Parents: 3d22564
Author: tedyu <yu...@gmail.com>
Authored: Tue Nov 1 15:40:40 2016 -0700
Committer: tedyu <yu...@gmail.com>
Committed: Tue Nov 1 15:40:40 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/backup/BackupDriver.java       |   9 +
 .../hbase/backup/BackupRestoreConstants.java    |   2 +-
 .../hadoop/hbase/backup/RestoreDriver.java      |  11 +-
 .../hadoop/hbase/backup/impl/BackupManager.java |  57 +++---
 .../hadoop/hbase/HBaseTestingUtility.java       |   1 -
 .../org/apache/hadoop/hbase/TestNamespace.java  |   3 +-
 .../hadoop/hbase/backup/TestBackupBase.java     |  23 +--
 .../hbase/backup/TestBackupCommandLineTool.java | 179 ++++++++++---------
 .../hbase/backup/TestBackupSystemTable.java     |  27 +--
 .../hbase/client/TestMetaWithReplicas.java      |   4 +-
 .../hbase/coprocessor/TestClassLoading.java     |  49 +++--
 ...TestMasterCoprocessorExceptionWithAbort.java |   2 +
 ...estMasterCoprocessorExceptionWithRemove.java |   2 +
 .../master/TestDistributedLogSplitting.java     |   7 +-
 .../hadoop/hbase/master/TestMasterFailover.java |   5 +-
 .../TestMasterOperationsForRegionReplicas.java  |   6 +-
 .../TestMasterRestartAfterDisablingTable.java   |   2 +
 .../hadoop/hbase/master/TestRestartCluster.java |   3 +
 .../hadoop/hbase/master/TestRollingRestart.java |   2 +
 .../regionserver/TestRegionServerMetrics.java   |  44 ++++-
 .../security/access/TestTablePermissions.java   |  10 +-
 .../visibility/TestVisibilityLabelsWithACL.java |  15 +-
 .../hadoop/hbase/util/TestHBaseFsckOneRS.java   |  66 ++++---
 .../util/hbck/OfflineMetaRebuildTestCore.java   |   3 +-
 24 files changed, 313 insertions(+), 219 deletions(-)
----------------------------------------------------------------------
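With this patch the backup/restore feature is off unless hbase.backup.enable is set to true on the cluster (BACKUP_ENABLE_DEFAULT flips to false below), and the command-line drivers refuse to run when it is not. The updated tests opt back in by setting the key before starting their mini clusters; the following is a minimal sketch of that pattern, assuming only the classes touched by this patch (the wrapper class name is illustrative, not part of the change):

    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.backup.BackupRestoreConstants;

    public class BackupEnabledMiniCluster {
      public static void main(String[] args) throws Exception {
        // Backup is now disabled by default; opt in before the master and
        // regionservers start, exactly as the updated tests below do.
        HBaseTestingUtility util = new HBaseTestingUtility();
        util.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
        util.startMiniCluster();
        // ... exercise backup/restore against util.getConnection() ...
        util.shutdownMiniCluster();
      }
    }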


http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
index 07f39b6..099e418 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupDriver.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.backup.impl.BackupCommands;
+import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.backup.util.LogUtils;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -54,6 +55,14 @@ public class BackupDriver extends AbstractHBaseTool implements BackupRestoreCons
 
   private int parseAndRun(String[] args) throws IOException {
 
+    // Check if backup is enabled
+    if (!BackupManager.isBackupEnabled(getConf())) {
+      System.err.println("Backup is not enabled. To enable backup, "+
+          "set \'hbase.backup.enabled'=true and restart "+
+          "the cluster");
+      return -1;
+    }
+
     String cmd = null;
     String[] remainArgs = null;
     if (args == null || args.length == 0) {
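Both BackupDriver and RestoreDriver now perform this check before parsing any command, so the tools exit with -1 unless the configuration they run with (and the cluster itself) has hbase.backup.enable set to true, the key defined as BACKUP_ENABLE_KEY in BackupRestoreConstants. A minimal sketch of invoking the driver programmatically with backup enabled, mirroring the updated TestBackupCommandLineTool setup (the wrapper class name and the chosen "history" command are only illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.backup.BackupDriver;
    import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
    import org.apache.hadoop.util.ToolRunner;

    public class RunBackupTool {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Without this, parseAndRun() prints "Backup is not enabled" and returns -1.
        conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
        int ret = ToolRunner.run(conf, new BackupDriver(), new String[] { "history" });
        System.exit(ret);
      }
    }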

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
index 5c869f6..eff45cc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
@@ -79,7 +79,7 @@ public interface BackupRestoreConstants {
    * Backup/Restore constants
    */
   public final static String BACKUP_ENABLE_KEY = "hbase.backup.enable";
-  public final static boolean BACKUP_ENABLE_DEFAULT = true;
+  public final static boolean BACKUP_ENABLE_DEFAULT = false;
   public final static String BACKUP_SYSTEM_TTL_KEY = "hbase.backup.system.ttl";
   public final static int BACKUP_SYSTEM_TTL_DEFAULT = HConstants.FOREVER;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
index ca1b2de..336060f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.backup.impl.HBaseBackupAdmin;
 import org.apache.hadoop.hbase.backup.util.BackupServerUtil;
@@ -66,6 +67,13 @@ public class RestoreDriver extends AbstractHBaseTool implements BackupRestoreCon
   }
 
   private int parseAndRun(String[] args) throws IOException {
+    // Check if backup is enabled
+    if (!BackupManager.isBackupEnabled(getConf())) {
+      System.err.println("Backup is not enabled. To enable backup, "+
+          "set \'hbase.backup.enabled'=true and restart "+
+          "the cluster");
+      return -1;
+    }
     // enable debug logging
     Logger backupClientLogger = Logger.getLogger("org.apache.hadoop.hbase.backup");
     if (cmd.hasOption(OPTION_DEBUG)) {
@@ -126,7 +134,7 @@ public class RestoreDriver extends AbstractHBaseTool implements BackupRestoreCon
       TableName[] sTableArray = BackupServerUtil.parseTableNames(tables);
       TableName[] tTableArray = BackupServerUtil.parseTableNames(tableMapping);
 
-      if (sTableArray != null && tTableArray != null 
+      if (sTableArray != null && tTableArray != null
           && (sTableArray.length != tTableArray.length)) {
         System.out.println("ERROR: table mapping mismatch: " + tables + " : " + tableMapping);
         printToolUsage();
@@ -216,6 +224,7 @@ public class RestoreDriver extends AbstractHBaseTool implements BackupRestoreCon
     return ret;
   }
 
+  @Override
   protected boolean sanityCheckOptions(CommandLine cmd) {
     boolean success = true;
     for (String reqOpt : requiredOptions) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
index 0aad830..af715d4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
@@ -38,10 +38,10 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.BackupInfo;
+import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.HBackupFileSystem;
-import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
 import org.apache.hadoop.hbase.backup.master.BackupController;
 import org.apache.hadoop.hbase.backup.master.BackupLogCleaner;
@@ -49,10 +49,8 @@ import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
 import org.apache.hadoop.hbase.backup.regionserver.LogRollRegionServerProcedureManager;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ClusterConnection;
-import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.client.Connection;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
@@ -65,16 +63,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 @InterfaceStability.Evolving
 public class BackupManager implements Closeable {
   private static final Log LOG = LogFactory.getLog(BackupManager.class);
-
   private Configuration conf = null;
   private BackupInfo backupContext = null;
-
   private ExecutorService pool = null;
-
-  private boolean backupComplete = false;
-
   private BackupSystemTable systemTable;
-
   private final Connection conn;
 
   /**
@@ -90,7 +82,7 @@ public class BackupManager implements Closeable {
     this.conf = conf;
     this.conn = conn;
     this.systemTable = new BackupSystemTable(conn);
-     
+
   }
 
   /**
@@ -113,31 +105,31 @@ public class BackupManager implements Closeable {
     String cleanerClass = BackupLogCleaner.class.getCanonicalName();
     if (!plugins.contains(cleanerClass)) {
       conf.set(HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS, plugins + "," + cleanerClass);
-    }    
-    
+    }
+
     String classes = conf.get("hbase.procedure.master.classes");
     String masterProcedureClass = LogRollMasterProcedureManager.class.getName();
-    if(classes == null){    
+    if(classes == null){
       conf.set("hbase.procedure.master.classes", masterProcedureClass);
     } else if(!classes.contains(masterProcedureClass)){
       conf.set("hbase.procedure.master.classes", classes +","+masterProcedureClass);
-    }    
- 
+    }
+
     // Set Master Observer - Backup Controller
     classes = conf.get("hbase.coprocessor.master.classes");
     String observerClass = BackupController.class.getName();
-    if(classes == null){    
+    if(classes == null){
       conf.set("hbase.coprocessor.master.classes", observerClass);
     } else if(!classes.contains(observerClass)){
       conf.set("hbase.coprocessor.master.classes", classes +","+observerClass);
-    }    
+    }
 
     if (LOG.isDebugEnabled()) {
       LOG.debug("Added log cleaner: " + cleanerClass);
       LOG.debug("Added master procedure manager: "+masterProcedureClass);
-      LOG.debug("Added master observer: "+observerClass);      
+      LOG.debug("Added master observer: "+observerClass);
     }
-    
+
   }
 
   /**
@@ -148,23 +140,24 @@ public class BackupManager implements Closeable {
     if (!isBackupEnabled(conf)) {
       return;
     }
-    
+
     String classes = conf.get("hbase.procedure.regionserver.classes");
     String regionProcedureClass = LogRollRegionServerProcedureManager.class.getName();
-    if(classes == null){    
+    if(classes == null){
       conf.set("hbase.procedure.regionserver.classes", regionProcedureClass);
     } else if(!classes.contains(regionProcedureClass)){
       conf.set("hbase.procedure.regionserver.classes", classes +","+regionProcedureClass);
-    }    
+    }
     if (LOG.isDebugEnabled()) {
       LOG.debug("Added region procedure manager: "+regionProcedureClass);
     }
-    
+
   }
-  
-  
-  private static boolean isBackupEnabled(Configuration conf) {
-    return conf.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, BackupRestoreConstants.BACKUP_ENABLE_DEFAULT);
+
+
+  public static boolean isBackupEnabled(Configuration conf) {
+    return conf.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY,
+      BackupRestoreConstants.BACKUP_ENABLE_DEFAULT);
   }
 
   /**
@@ -242,7 +235,7 @@ public class BackupManager implements Closeable {
     }
 
     // there are one or more tables in the table list
-    backupContext = new BackupInfo(backupId, type, 
+    backupContext = new BackupInfo(backupId, type,
       tableList.toArray(new TableName[tableList.size()]),
       targetRootDir);
     backupContext.setBandwidth(bandwidth);
@@ -301,7 +294,7 @@ public class BackupManager implements Closeable {
    */
   public ArrayList<BackupImage> getAncestors(BackupInfo backupCtx) throws IOException,
       BackupException {
-    LOG.debug("Getting the direct ancestors of the current backup "+ 
+    LOG.debug("Getting the direct ancestors of the current backup "+
       backupCtx.getBackupId());
 
     ArrayList<BackupImage> ancestors = new ArrayList<BackupImage>();
@@ -448,7 +441,7 @@ public class BackupManager implements Closeable {
    */
   public void writeRegionServerLogTimestamp(Set<TableName> tables,
       HashMap<String, Long> newTimestamps) throws IOException {
-    systemTable.writeRegionServerLogTimestamp(tables, newTimestamps, 
+    systemTable.writeRegionServerLogTimestamp(tables, newTimestamps,
       backupContext.getTargetRootDir());
   }
 
@@ -487,7 +480,7 @@ public class BackupManager implements Closeable {
    * safely purged.
    */
   public void recordWALFiles(List<String> files) throws IOException {
-    systemTable.addWALFiles(files, 
+    systemTable.addWALFiles(files,
       backupContext.getBackupId(), backupContext.getTargetRootDir());
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index ae19137..cdd1e71 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1064,7 +1064,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   throws IOException, InterruptedException {
     // Now do the mini hbase cluster.  Set the hbase.rootdir in config.
     createRootDir(create);
-
     // These settings will make the server waits until this exact number of
     // regions servers are connected.
     if (conf.getInt(ServerManager.WAIT_ON_REGIONSERVERS_MINTOSTART, -1) == -1) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
index d1af4b6..56015b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
@@ -34,6 +33,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -66,6 +66,7 @@ public class TestNamespace {
   @BeforeClass
   public static void setUp() throws Exception {
     TEST_UTIL = new HBaseTestingUtility();
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(NUM_SLAVES_BASE);
     admin = TEST_UTIL.getHBaseAdmin();
     cluster = TEST_UTIL.getHBaseCluster();

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
index 2da7871..3eeac05 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
@@ -97,6 +97,7 @@ public class TestBackupBase {
   public static void setUpBeforeClass() throws Exception {
     TEST_UTIL = new HBaseTestingUtility();
     conf1 = TEST_UTIL.getConfiguration();
+    conf1.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
     // Set MultiWAL (with 2 default WAL files per RS)
     conf1.set(WALFactory.WAL_PROVIDER, provider);
@@ -120,22 +121,22 @@ public class TestBackupBase {
     createTables();
     populateFromMasterConfig(TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(), conf1);
   }
-  
+
   private static void populateFromMasterConfig(Configuration masterConf, Configuration conf) {
-    Iterator<Entry<String,String>> it = masterConf.iterator();  
+    Iterator<Entry<String,String>> it = masterConf.iterator();
     while(it.hasNext()) {
       Entry<String,String> e = it.next();
       conf.set(e.getKey(), e.getValue());
     }
   }
-  
+
   public static void waitForSystemTable() throws Exception
   {
     try (Admin admin = TEST_UTIL.getAdmin();) {
-      while (!admin.tableExists(BackupSystemTable.getTableName()) 
+      while (!admin.tableExists(BackupSystemTable.getTableName())
           || !admin.isTableAvailable(BackupSystemTable.getTableName())) {
         Thread.sleep(1000);
-      }      
+      }
     }
     LOG.debug("backup table exists and available");
 
@@ -173,7 +174,7 @@ public class TestBackupBase {
     try {
       conn = ConnectionFactory.createConnection(conf1);
       badmin = new HBaseBackupAdmin(conn);
-      BackupRequest request = new BackupRequest();      
+      BackupRequest request = new BackupRequest();
       request.setBackupType(type).setTableList(tables).setTargetRootDir(path);
       backupId = badmin.backupTables(request);
     } finally {
@@ -194,7 +195,7 @@ public class TestBackupBase {
   protected String incrementalTableBackup(List<TableName> tables) throws IOException {
     return backupTables(BackupType.INCREMENTAL, tables, BACKUP_ROOT_DIR);
   }
-  
+
   protected static void loadTable(HTable table) throws Exception {
 
     Put p; // 100 + 1 row to t1_syncup
@@ -211,19 +212,19 @@ public class TestBackupBase {
     long tid = System.currentTimeMillis();
     table1 = TableName.valueOf("ns1:test-" + tid);
     HBaseAdmin ha = TEST_UTIL.getHBaseAdmin();
-    
+
     // Create namespaces
     NamespaceDescriptor desc1 = NamespaceDescriptor.create("ns1").build();
     NamespaceDescriptor desc2 = NamespaceDescriptor.create("ns2").build();
     NamespaceDescriptor desc3 = NamespaceDescriptor.create("ns3").build();
     NamespaceDescriptor desc4 = NamespaceDescriptor.create("ns4").build();
-    
+
     ha.createNamespace(desc1);
     ha.createNamespace(desc2);
     ha.createNamespace(desc3);
     ha.createNamespace(desc4);
 
-    
+
     HTableDescriptor desc = new HTableDescriptor(table1);
     HColumnDescriptor fam = new HColumnDescriptor(famName);
     desc.addFamily(fam);
@@ -283,7 +284,7 @@ public class TestBackupBase {
     }
     return ret;
   }
-    
+
   protected void dumpBackupDir() throws IOException
   {
     // Dump Backup Dir

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
index 89e5da6..3a632fc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
@@ -36,13 +36,14 @@ public class TestBackupCommandLineTool {
   @Before
   public void setUpBefore() throws Exception {
     conf = HBaseConfiguration.create();
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
   }
 
   @Test
   public void testBackupDriverDescribeHelp() throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"describe", "-help" }; 
+    String[] args = new String[]{"describe", "-help" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
@@ -51,16 +52,16 @@ public class TestBackupCommandLineTool {
 
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"describe", "-h" }; 
+    args = new String[]{"describe", "-h" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
-    assertTrue(output.indexOf("Usage: bin/hbase backup describe <backupId>") >= 0);     
-    
+    assertTrue(output.indexOf("Usage: bin/hbase backup describe <backupId>") >= 0);
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"describe" }; 
+    args = new String[]{"describe" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     output = baos.toString();
@@ -72,37 +73,37 @@ public class TestBackupCommandLineTool {
   public void testBackupDriverCreateHelp() throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"create", "-help" }; 
+    String[] args = new String[]{"create", "-help" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup create") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"create", "-h" }; 
+    args = new String[]{"create", "-h" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup create") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"create"}; 
+    args = new String[]{"create"};
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
-    assertTrue(output.indexOf("Usage: bin/hbase backup create") >= 0);    
+    assertTrue(output.indexOf("Usage: bin/hbase backup create") >= 0);
   }
 
   @Test
   public void testBackupDriverHistoryHelp () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"history", "-help" }; 
+    String[] args = new String[]{"history", "-help" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
@@ -111,20 +112,20 @@ public class TestBackupCommandLineTool {
 
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"history", "-h" }; 
+    args = new String[]{"history", "-h" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup history") >= 0);
-    
+
   }
 
   @Test
   public void testBackupDriverDeleteHelp () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"delete", "-help" }; 
+    String[] args = new String[]{"delete", "-help" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
@@ -133,39 +134,39 @@ public class TestBackupCommandLineTool {
 
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"delete", "-h" }; 
+    args = new String[]{"delete", "-h" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup delete") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"delete" }; 
+    args = new String[]{"delete" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     output = baos.toString();
     System.out.println(baos.toString());
-    assertTrue(output.indexOf("Usage: bin/hbase backup delete") >= 0);    
+    assertTrue(output.indexOf("Usage: bin/hbase backup delete") >= 0);
   }
 
   @Test
   public void testBackupDriverProgressHelp() throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"progress", "-help" }; 
+    String[] args = new String[]{"progress", "-help" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup progress") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"progress", "-h" }; 
+    args = new String[]{"progress", "-h" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup progress") >= 0);
@@ -175,238 +176,238 @@ public class TestBackupCommandLineTool {
   public void testBackupDriverSetHelp () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"set", "-help" }; 
+    String[] args = new String[]{"set", "-help" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup set") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"set", "-h" }; 
+    args = new String[]{"set", "-h" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup set") >= 0);
 
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"set"}; 
+    args = new String[]{"set"};
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup set") >= 0);
-    
+
   }
-  
+
   @Test
   public void testBackupDriverHelp () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"-help" }; 
+    String[] args = new String[]{"-help" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"-h" }; 
+    args = new String[]{"-h" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
   }
-  
+
   @Test
   public void testRestoreDriverHelp () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"-help" }; 
+    String[] args = new String[]{"-help" };
     ToolRunner.run(conf, new RestoreDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase restore") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"-h" }; 
+    args = new String[]{"-h" };
     ToolRunner.run(conf, new RestoreDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase restore") >= 0);
   }
-  
+
   @Test
   public void testBackupDriverUnrecognizedCommand () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"command" }; 
+    String[] args = new String[]{"command" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"command" }; 
+    args = new String[]{"command" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
   }
-  
-  
-  
+
+
+
   @Test
   public void testBackupDriverUnrecognizedOption () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"create", "-xx" }; 
+    String[] args = new String[]{"create", "-xx" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"describe", "-xx" }; 
+    args = new String[]{"describe", "-xx" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"history", "-xx" }; 
+    args = new String[]{"history", "-xx" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"delete", "-xx" }; 
+    args = new String[]{"delete", "-xx" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"set", "-xx" }; 
+    args = new String[]{"set", "-xx" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup") >= 0);
   }
-  
+
   @Test
   public void testRestoreDriverUnrecognizedOption () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"-xx" }; 
+    String[] args = new String[]{"-xx" };
     ToolRunner.run(conf, new RestoreDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase restore") >= 0);
-     
+
   }
-  
+
   @Test
   public void testBackupDriverCreateWrongArgNumber () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"create" }; 
+    String[] args = new String[]{"create" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup create") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"create", "22" }; 
+    args = new String[]{"create", "22" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup create") >= 0);
-    
+
     baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    args = new String[]{"create", "22", "22", "22", "22", "22" }; 
+    args = new String[]{"create", "22", "22", "22", "22", "22" };
     ToolRunner.run(conf, new BackupDriver(), args);
-    
+
     output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup create") >= 0);
   }
-  
+
   @Test
   public void testBackupDriverDeleteWrongArgNumber () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"delete" }; 
+    String[] args = new String[]{"delete" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup delete") >= 0);
-    
+
   }
-    
+
   @Test
   public void testBackupDriverHistoryWrongArgs () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"history", "-n", "xx" }; 
+    String[] args = new String[]{"history", "-n", "xx" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("Usage: bin/hbase backup history") >= 0);
-    
+
   }
-  
+
   @Test
   public void testBackupDriverWrongBackupDestination () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"create", "full", "clicks" }; 
+    String[] args = new String[]{"create", "full", "clicks" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("ERROR: invalid backup destination") >= 0);
-    
+
   }
-  
+
   @Test
   public void testBackupDriverBackupSetAndList () throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     System.setOut(new PrintStream(baos));
-    String[] args = new String[]{"create", "full", "file:/", "clicks", "-s", "s" }; 
+    String[] args = new String[]{"create", "full", "file:/", "clicks", "-s", "s" };
     ToolRunner.run(conf, new BackupDriver(), args);
 
     String output = baos.toString();
     System.out.println(baos.toString());
     assertTrue(output.indexOf("ERROR: You can specify either backup set or list") >= 0);
-    
+
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSystemTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSystemTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSystemTable.java
index 3488786..d8a8739 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSystemTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupSystemTable.java
@@ -65,21 +65,22 @@ public class TestBackupSystemTable {
 
   @BeforeClass
   public static void setUp() throws Exception {
-    cluster = UTIL.startMiniCluster(); 
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
+    cluster = UTIL.startMiniCluster();
     conn = UTIL.getConnection();
     waitForSystemTable();
   }
-  
+
   static void waitForSystemTable() throws Exception
   {
     try(Admin admin = UTIL.getAdmin();) {
-      while (!admin.tableExists(BackupSystemTable.getTableName()) 
+      while (!admin.tableExists(BackupSystemTable.getTableName())
           || !admin.isTableAvailable(BackupSystemTable.getTableName())) {
         Thread.sleep(1000);
-      }      
+      }
     }
   }
-  
+
   @Before
   public void before() throws IOException {
     table = new BackupSystemTable(conn);
@@ -156,7 +157,7 @@ public class TestBackupSystemTable {
 
   @Test
   public void testBackupDelete() throws IOException {
-    
+
     try (BackupSystemTable table = new BackupSystemTable(conn)) {
 
       int n = 10;
@@ -189,8 +190,8 @@ public class TestBackupSystemTable {
 
   }
 
-  
-  
+
+
   @Test
   public void testRegionServerLastLogRollResults() throws IOException {
     String[] servers = new String[] { "server1", "server2", "server3" };
@@ -233,7 +234,7 @@ public class TestBackupSystemTable {
 
     table.addIncrementalBackupTableSet(tables1, "root");
     BackupSystemTable table = new BackupSystemTable(conn);
-    TreeSet<TableName> res1 = (TreeSet<TableName>) 
+    TreeSet<TableName> res1 = (TreeSet<TableName>)
         table.getIncrementalBackupTableSet("root");
     assertTrue(tables1.size() == res1.size());
     Iterator<TableName> desc1 = tables1.descendingIterator();
@@ -243,7 +244,7 @@ public class TestBackupSystemTable {
     }
 
     table.addIncrementalBackupTableSet(tables2, "root");
-    TreeSet<TableName> res2 = (TreeSet<TableName>) 
+    TreeSet<TableName> res2 = (TreeSet<TableName>)
         table.getIncrementalBackupTableSet("root");
     assertTrue((tables2.size() + tables1.size() - 1) == res2.size());
 
@@ -349,7 +350,7 @@ public class TestBackupSystemTable {
     cleanBackupTable();
   }
 
-  
+
   /**
    * Backup set tests
    */
@@ -409,7 +410,7 @@ public class TestBackupSystemTable {
         assertTrue(tnames.get(i).getNameAsString().equals("table" + (i + 1)));
       }
       cleanBackupTable();
-    } 
+    }
   }
 
   @Test
@@ -486,7 +487,7 @@ public class TestBackupSystemTable {
       cleanBackupTable();
     }
   }
-   
+
 
   private boolean compare(BackupInfo one, BackupInfo two) {
     return one.getBackupId().equals(two.getBackupId()) && one.getType().equals(two.getType())

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
index 5922f21..173a551 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -82,8 +83,9 @@ public class TestMetaWithReplicas {
     TEST_UTIL.getConfiguration().setInt(HConstants.META_REPLICAS_NUM, 3);
     TEST_UTIL.getConfiguration().setInt(
         StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD, 1000);
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(3);
-    
+
     TEST_UTIL.waitUntilAllSystemRegionsAssigned();
 
     // disable the balancer

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index 762ee36..5728cd9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -18,10 +18,36 @@
  */
 package org.apache.hadoop.hbase.coprocessor;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.RegionLoad;
+import org.apache.hadoop.hbase.ServerLoad;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.master.BackupController;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.regionserver.Region;
@@ -31,22 +57,11 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.ClassLoaderTestHelper;
 import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.ServerLoad;
-import org.apache.hadoop.hbase.RegionLoad;
-
-import java.io.*;
-import java.util.*;
-
-import org.junit.*;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
-
 /**
  * Test coprocessors class loading.
  */
@@ -84,7 +99,7 @@ public class TestClassLoading {
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     Configuration conf = TEST_UTIL.getConfiguration();
-
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     // regionCoprocessor1 will be loaded on all regionservers, since it is
     // loaded for any tables (user or meta).
     conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
@@ -543,7 +558,7 @@ public class TestClassLoading {
 
   @Test
   public void testFindCoprocessors() {
-    // HBASE 12277: 
+    // HBASE 12277:
     CoprocessorHost masterCpHost =
                              TEST_UTIL.getHBaseCluster().getMaster().getMasterCoprocessorHost();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
index 4a10ff9..67c20f3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -144,6 +145,7 @@ public class TestMasterCoprocessorExceptionWithAbort {
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
     Configuration conf = UTIL.getConfiguration();
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
         BuggyMasterObserver.class.getName());
     conf.setBoolean(CoprocessorHost.ABORT_ON_ERROR_KEY, true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java
index 8534b6b..a892dcb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -126,6 +127,7 @@ public class TestMasterCoprocessorExceptionWithRemove {
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
     Configuration conf = UTIL.getConfiguration();
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
         BuggyMasterObserver.class.getName());
     UTIL.getConfiguration().setBoolean(CoprocessorHost.ABORT_ON_ERROR_KEY, false);

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index aefa30e..807b2a2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -68,6 +68,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.SplitLogCounters;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionUtils;
 import org.apache.hadoop.hbase.client.Delete;
@@ -80,7 +81,6 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager;
 import org.apache.hadoop.hbase.coordination.ZKSplitLogManagerCoordination;
 import org.apache.hadoop.hbase.exceptions.OperationConflictException;
 import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
@@ -163,6 +163,7 @@ public class TestDistributedLogSplitting {
   private void startCluster(int num_rs) throws Exception {
     SplitLogCounters.resetCounters();
     LOG.info("Starting cluster");
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     conf.getLong("hbase.splitlog.max.resubmit", 0);
     // Make the failure test faster
     conf.setInt("zookeeper.recovery.retry", 0);
@@ -1136,8 +1137,8 @@ public class TestDistributedLogSplitting {
       out.write(Bytes.toBytes("corrupted bytes"));
       out.close();
       ZKSplitLogManagerCoordination coordination =
-          (ZKSplitLogManagerCoordination) ((BaseCoordinatedStateManager) master
-              .getCoordinatedStateManager()).getSplitLogManagerCoordination();
+          (ZKSplitLogManagerCoordination) master
+              .getCoordinatedStateManager().getSplitLogManagerCoordination();
       coordination.setIgnoreDeleteForTesting(true);
       executor = Executors.newSingleThreadExecutor();
       Runnable runnable = new Runnable() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
index ff98292..80c0b66 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.master.RegionState.State;
@@ -98,7 +99,7 @@ public class TestMasterFailover {
 
     // Start the cluster
     HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(NUM_MASTERS, NUM_RS);
     MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
 
@@ -205,6 +206,7 @@ public class TestMasterFailover {
 
     // Start the cluster
     HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(conf);
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(NUM_MASTERS, NUM_RS);
     MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
     log("Cluster started");
@@ -333,6 +335,7 @@ public class TestMasterFailover {
 
     // Start the cluster
     HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(NUM_MASTERS, NUM_RS);
     MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
     log("Cluster started");

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index ce66ee4..6aefe6e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -38,11 +38,12 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.MetaTableAccessor.Visitor;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.MetaTableAccessor.Visitor;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -71,6 +72,7 @@ public class TestMasterOperationsForRegionReplicas {
   public static void setupBeforeClass() throws Exception {
     conf = TEST_UTIL.getConfiguration();
     conf.setBoolean("hbase.tests.use.shortcircuit.reads", false);
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(numSlaves);
     CONNECTION = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
     ADMIN = CONNECTION.getAdmin();

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
index cfdd581..07e1c06 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
@@ -58,6 +59,7 @@ public class TestMasterRestartAfterDisablingTable {
     log("Starting cluster");
     Configuration conf = HBaseConfiguration.create();
     HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(conf);
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(NUM_MASTERS, NUM_RS);
     MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
     log("Waiting for active/ready master");

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
index 5cf7c8e..379d9f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -62,6 +63,7 @@ public class TestRestartCluster {
 
   @Test (timeout=300000)
   public void testClusterRestart() throws Exception {
+    UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     UTIL.startMiniCluster(3);
     while (!UTIL.getMiniHBaseCluster().getMaster().isInitialized()) {
       Threads.sleep(1);
@@ -108,6 +110,7 @@ public class TestRestartCluster {
    */
   @Test (timeout=300000)
   public void testRetainAssignmentOnRestart() throws Exception {
+    UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     UTIL.startMiniCluster(2);
     while (!UTIL.getMiniHBaseCluster().getMaster().isInitialized()) {
       Threads.sleep(1);
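
TestRestartCluster differs from the class-level pattern sketched earlier only in placement: each test here starts its own mini cluster, so the BACKUP_ENABLE_KEY flag is set at the top of the individual @Test methods rather than in a @BeforeClass hook; the effect is the same, the configuration is flipped before startMiniCluster runs.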

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
index c9de2c4..1768e81 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -67,6 +68,7 @@ public class  TestRollingRestart {
     log("Starting cluster");
     Configuration conf = HBaseConfiguration.create();
     HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(conf);
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(NUM_MASTERS, NUM_RS);
     MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
     log("Waiting for active/ready master");

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 1ec0bf7..48d9bb3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -17,9 +17,40 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.CompatibilityFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Append;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -33,12 +64,6 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.*;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestRegionServerMetrics {
@@ -61,6 +86,7 @@ public class TestRegionServerMetrics {
   public static void startCluster() throws Exception {
     metricsHelper = CompatibilityFactory.getInstance(MetricsAssertHelper.class);
     TEST_UTIL = new HBaseTestingUtility();
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     conf = TEST_UTIL.getConfiguration();
     conf.getLong("hbase.splitlog.max.resubmit", 0);
     // Make the failure test faster
@@ -512,7 +538,7 @@ public class TestRegionServerMetrics {
     admin.close();
     connection.close();
   }
-  
+
   @Test
   @Ignore
   public void testRangeCountMetrics() throws Exception {
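
Besides the opt-in flag, the TestRegionServerMetrics hunks replace the wildcard imports (org.apache.hadoop.hbase.*, org.apache.hadoop.hbase.client.*, static org.junit.Assert.*) with explicit imports and move the static imports to the top, which is the usual checkstyle-friendly layout. For completeness, a hedged sketch of how code could consult the new default-off flag is shown below; the helper class and the literal false default are assumptions based on the commit subject, not code taken from this patch.

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.backup.BackupRestoreConstants;

  public final class BackupEnabledCheck {
    private BackupEnabledCheck() {}

    // Reads the flag with a default of false, i.e. backup disabled unless opted in.
    public static boolean isBackupEnabled(Configuration conf) {
      return conf.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, false);
    }

    // Fails fast with an explanatory message when backup has not been enabled.
    public static void ensureBackupEnabled(Configuration conf) throws IOException {
      if (!isBackupEnabled(conf)) {
        throw new IOException("Backup is disabled; set "
            + BackupRestoreConstants.BACKUP_ENABLE_KEY + "=true to use it");
      }
    }
  }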

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
index 7dbecdc..01b8357 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
@@ -37,12 +37,13 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
@@ -92,16 +93,13 @@ public class TestTablePermissions {
   public static void beforeClass() throws Exception {
     // setup configuration
     Configuration conf = UTIL.getConfiguration();
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     SecureTestUtil.enableSecurity(conf);
-
     UTIL.startMiniCluster();
-
     // Wait for the ACL table to become available
     UTIL.waitTableEnabled(AccessControlLists.ACL_TABLE_NAME);
-
     ZKW = new ZooKeeperWatcher(UTIL.getConfiguration(),
       "TestTablePermissions", ABORTABLE);
-
     UTIL.createTable(TEST_TABLE, TEST_FAMILY);
     UTIL.createTable(TEST_TABLE2, TEST_FAMILY);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
index 0a6e422..ef39623 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
@@ -81,6 +82,7 @@ public class TestVisibilityLabelsWithACL {
   public static void setupBeforeClass() throws Exception {
     // setup configuration
     conf = TEST_UTIL.getConfiguration();
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     SecureTestUtil.enableSecurity(conf);
     conf.set("hbase.coprocessor.master.classes", AccessController.class.getName() + ","
         + VisibilityController.class.getName());
@@ -123,6 +125,7 @@ public class TestVisibilityLabelsWithACL {
     SecureTestUtil.grantOnTable(TEST_UTIL, NORMAL_USER2.getShortName(), tableName,
       null, null, Permission.Action.READ);
     PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+      @Override
       public Void run() throws Exception {
         Scan s = new Scan();
         s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -152,6 +155,7 @@ public class TestVisibilityLabelsWithACL {
     final Table table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL
         + "&!" + PRIVATE, SECRET + "&!" + PRIVATE);
     PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+      @Override
       public Void run() throws Exception {
         Scan s = new Scan();
         s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -176,6 +180,7 @@ public class TestVisibilityLabelsWithACL {
     final Table table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + CONFIDENTIAL
         + "&!" + PRIVATE, SECRET + "&!" + PRIVATE);
     PrivilegedExceptionAction<Void> scanAction = new PrivilegedExceptionAction<Void>() {
+      @Override
       public Void run() throws Exception {
         Get g = new Get(row1);
         g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -205,6 +210,7 @@ public class TestVisibilityLabelsWithACL {
     SecureTestUtil.grantOnTable(TEST_UTIL, NORMAL_USER2.getShortName(), tableName,
       null, null, Permission.Action.READ);
     PrivilegedExceptionAction<Void> getAction = new PrivilegedExceptionAction<Void>() {
+      @Override
       public Void run() throws Exception {
         Get g = new Get(row1);
         g.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
@@ -221,8 +227,9 @@ public class TestVisibilityLabelsWithACL {
 
   @Test
   public void testLabelsTableOpsWithDifferentUsers() throws Throwable {
-    PrivilegedExceptionAction<VisibilityLabelsResponse> action = 
+    PrivilegedExceptionAction<VisibilityLabelsResponse> action =
         new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+      @Override
       public VisibilityLabelsResponse run() throws Exception {
         try (Connection conn = ConnectionFactory.createConnection(conf)) {
           return VisibilityClient.addLabels(conn, new String[] { "l1", "l2" });
@@ -238,6 +245,7 @@ public class TestVisibilityLabelsWithACL {
         .getResult(1).getException().getName());
 
     action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+      @Override
       public VisibilityLabelsResponse run() throws Exception {
         try (Connection conn = ConnectionFactory.createConnection(conf)) {
           return VisibilityClient.setAuths(conn, new String[] { CONFIDENTIAL, PRIVATE }, "user1");
@@ -253,6 +261,7 @@ public class TestVisibilityLabelsWithACL {
         .getResult(1).getException().getName());
 
     action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+      @Override
       public VisibilityLabelsResponse run() throws Exception {
         try (Connection conn = ConnectionFactory.createConnection(conf)) {
           return VisibilityClient.setAuths(conn, new String[] { CONFIDENTIAL, PRIVATE }, "user1");
@@ -266,6 +275,7 @@ public class TestVisibilityLabelsWithACL {
     assertTrue(response.getResult(1).getException().getValue().isEmpty());
 
     action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+      @Override
       public VisibilityLabelsResponse run() throws Exception {
         try (Connection conn = ConnectionFactory.createConnection(conf)) {
           return VisibilityClient.clearAuths(conn, new String[] {
@@ -288,8 +298,9 @@ public class TestVisibilityLabelsWithACL {
 
     VisibilityClient.setAuths(TEST_UTIL.getConnection(), new String[] { CONFIDENTIAL, PRIVATE },
       "user3");
-    PrivilegedExceptionAction<GetAuthsResponse> action1 = 
+    PrivilegedExceptionAction<GetAuthsResponse> action1 =
         new PrivilegedExceptionAction<GetAuthsResponse>() {
+      @Override
       public GetAuthsResponse run() throws Exception {
         try (Connection conn = ConnectionFactory.createConnection(conf)) {
           return VisibilityClient.getAuths(conn, "user3");
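
The TestVisibilityLabelsWithACL hunks are annotation and whitespace cleanups: every anonymous PrivilegedExceptionAction now carries @Override on its run() method, and trailing spaces on the continuation lines are removed. A short sketch of that idiom, an anonymous action executed as a particular test user via User.runAs, is below; the class and method names are hypothetical, and the runAs call is an assumption about how such actions are typically driven in these tests, not a line from this patch.

  import java.security.PrivilegedExceptionAction;

  import org.apache.hadoop.hbase.security.User;

  public final class RunAsSketch {
    private RunAsSketch() {}

    public static void runPrivileged(User user) throws Exception {
      user.runAs(new PrivilegedExceptionAction<Void>() {
        @Override  // the annotation these hunks add
        public Void run() throws Exception {
          // privileged work (e.g. a scan or get against the test table) would go here
          return null;
        }
      });
    }
  }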

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
index c198d55..ec8c17b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
@@ -19,6 +19,33 @@
 
 package org.apache.hadoop.hbase.util;
 
+import static org.apache.hadoop.hbase.util.hbck.HbckTestingUtil.assertErrors;
+import static org.apache.hadoop.hbase.util.hbck.HbckTestingUtil.assertNoErrors;
+import static org.apache.hadoop.hbase.util.hbck.HbckTestingUtil.doFsck;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -33,6 +60,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -51,8 +79,8 @@ import org.apache.hadoop.hbase.master.RegionStates;
 import org.apache.hadoop.hbase.master.TableLockManager;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.SplitTransactionImpl;
 import org.apache.hadoop.hbase.regionserver.SplitTransactionFactory;
+import org.apache.hadoop.hbase.regionserver.SplitTransactionImpl;
 import org.apache.hadoop.hbase.regionserver.TestEndToEndSplitTransaction;
 import org.apache.hadoop.hbase.replication.ReplicationFactory;
 import org.apache.hadoop.hbase.replication.ReplicationQueues;
@@ -69,26 +97,6 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import static org.apache.hadoop.hbase.util.hbck.HbckTestingUtil.*;
-import static org.junit.Assert.*;
-
 @Category({MiscTests.class, LargeTests.class})
 public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
 
@@ -105,6 +113,8 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     conf.setInt("hbase.hconnection.threads.core", POOL_SIZE);
     conf.setInt("hbase.hbck.close.timeout", 2 * REGION_ONLINE_TIMEOUT);
     conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 8 * REGION_ONLINE_TIMEOUT);
+    conf.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
+
     TEST_UTIL.startMiniCluster(1);
 
     tableExecutorService = new ThreadPoolExecutor(1, POOL_SIZE, 60, TimeUnit.SECONDS,
@@ -122,7 +132,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     admin.setBalancerRunning(false, true);
 
     TEST_UTIL.waitUntilAllSystemRegionsAssigned();
-    
+
   }
 
   @AfterClass
@@ -603,7 +613,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
       hbck.close();
     }
   }
-  
+
   @Test (timeout=180000)
   public void testHbckAfterRegionMerge() throws Exception {
     TableName table = TableName.valueOf("testMergeRegionFilesInHdfs");
@@ -1521,7 +1531,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     // check no errors
     HBaseFsck hbck = doFsck(conf, false);
     assertNoErrors(hbck);
-    
+
     // create peer
     ReplicationAdmin replicationAdmin = new ReplicationAdmin(conf);
     Assert.assertEquals(0, replicationAdmin.getPeersCount());
@@ -1530,7 +1540,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     replicationAdmin.addPeer("1", "127.0.0.1:" + zkPort + ":/hbase");
     replicationAdmin.getPeersCount();
     Assert.assertEquals(1, replicationAdmin.getPeersCount());
-    
+
     // create replicator
     ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "Test Hbase Fsck", connection);
     ReplicationQueues repQueues =
@@ -1542,7 +1552,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     Assert.assertEquals(2, repQueues.getAllQueues().size());
     hbck = doFsck(conf, false);
     assertNoErrors(hbck);
-    
+
     // queues for removed peer
     repQueues.addLog("2", "file1");
     repQueues.addLog("2-server2", "file1");
@@ -1551,7 +1561,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     assertErrors(hbck, new HBaseFsck.ErrorReporter.ERROR_CODE[] {
         HBaseFsck.ErrorReporter.ERROR_CODE.UNDELETED_REPLICATION_QUEUE,
         HBaseFsck.ErrorReporter.ERROR_CODE.UNDELETED_REPLICATION_QUEUE });
-    
+
     // fix the case
     hbck = doFsck(conf, true);
     hbck = doFsck(conf, false);
@@ -1560,7 +1570,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     Assert.assertEquals(2, repQueues.getAllQueues().size());
     Assert.assertNull(repQueues.getLogsInQueue("2"));
     Assert.assertNull(repQueues.getLogsInQueue("2-sever2"));
-    
+
     replicationAdmin.removePeer("1");
     repQueues.removeAllQueues();
     zkw.close();
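
Apart from adding the BACKUP_ENABLE_KEY line before TEST_UTIL.startMiniCluster(1), the TestHBaseFsckOneRS hunks are mechanical: the wildcard static imports of HbckTestingUtil and org.junit.Assert are expanded into explicit static imports, the import block is reordered, and several trailing-whitespace-only lines are trimmed; none of the replication-queue assertions change behavior.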

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd41083d/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
index cab82ad..903f796 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
@@ -38,11 +38,11 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
@@ -94,6 +94,7 @@ public class OfflineMetaRebuildTestCore {
   public void setUpBefore() throws Exception {
     TEST_UTIL = new HBaseTestingUtility();
     TEST_UTIL.getConfiguration().setInt("dfs.datanode.max.xceivers", 9192);
+    TEST_UTIL.getConfiguration().setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
     TEST_UTIL.startMiniCluster(3);
     conf = TEST_UTIL.getConfiguration();
     this.connection = ConnectionFactory.createConnection(conf);