You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by te...@apache.org on 2017/09/28 17:20:10 UTC
hbase git commit: HBASE-18887 After full backup passed on hdfs root
and incremental failed, full backup cannot be cleaned (Vladimir Rodionov)
Repository: hbase
Updated Branches:
refs/heads/master a11a35a11 -> ca2959824
HBASE-18887 After full backup passed on hdfs root and incremental failed, full backup cannot be cleaned (Vladimir Rodionov)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca295982
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca295982
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca295982
Branch: refs/heads/master
Commit: ca2959824ddf206143b819ae85bb29b6b60faa3b
Parents: a11a35a
Author: tedyu <yu...@gmail.com>
Authored: Thu Sep 28 10:20:02 2017 -0700
Committer: tedyu <yu...@gmail.com>
Committed: Thu Sep 28 10:20:02 2017 -0700
----------------------------------------------------------------------
.../hadoop/hbase/backup/impl/BackupCommands.java | 18 +++++++++++++++---
.../hadoop/hbase/backup/TestBackupBase.java | 12 ++++++++----
.../hbase/backup/TestBackupCommandLineTool.java | 12 +++++++++++-
3 files changed, 34 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/ca295982/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
----------------------------------------------------------------------
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
index 2dfd46e..194d350 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
@@ -58,11 +58,11 @@ import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
import org.apache.hadoop.hbase.backup.BackupType;
import org.apache.hadoop.hbase.backup.util.BackupSet;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* General backup commands, options and usage messages
@@ -73,6 +73,9 @@ public final class BackupCommands {
public final static String INCORRECT_USAGE = "Incorrect usage";
+ public final static String TOP_LEVEL_NOT_ALLOWED =
+ "Top level (root) folder is not allowed to be a backup destination";
+
public static final String USAGE = "Usage: hbase backup COMMAND [command-specific arguments]\n"
+ "where COMMAND is one of:\n" + " create create a new backup image\n"
+ " delete delete an existing backup image\n"
@@ -283,7 +286,11 @@ public final class BackupCommands {
printUsage();
throw new IOException(INCORRECT_USAGE);
}
-
+ String targetBackupDir = args[2];
+ // Check if backup destination is top level (root) folder - not allowed
+ if (isRootFolder(targetBackupDir)) {
+ throw new IOException(TOP_LEVEL_NOT_ALLOWED);
+ }
String tables = null;
// Check if we have both: backup set and list of tables
@@ -331,7 +338,7 @@ public final class BackupCommands {
.withBackupType(BackupType.valueOf(args[1].toUpperCase()))
.withTableList(
tables != null ? Lists.newArrayList(BackupUtils.parseTableNames(tables)) : null)
- .withTargetRootDir(args[2]).withTotalTasks(workers)
+ .withTargetRootDir(targetBackupDir).withTotalTasks(workers)
.withBandwidthPerTasks(bandwidth).withBackupSetName(setName).build();
String backupId = admin.backupTables(request);
System.out.println("Backup session " + backupId + " finished. Status: SUCCESS");
@@ -341,6 +348,11 @@ public final class BackupCommands {
}
}
+ private boolean isRootFolder(String targetBackupDir) {
+ Path p = new Path(targetBackupDir);
+ return p.isRoot();
+ }
+
private boolean verifyPath(String path) {
try {
Path p = new Path(path);
http://git-wip-us.apache.org/repos/asf/hbase/blob/ca295982/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
----------------------------------------------------------------------
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
index 8752ca2..69db342 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
@@ -98,8 +98,8 @@ public class TestBackupBase {
protected static final byte[] qualName = Bytes.toBytes("q1");
protected static final byte[] famName = Bytes.toBytes("f");
- protected static String BACKUP_ROOT_DIR = "/backupUT";
- protected static String BACKUP_REMOTE_ROOT_DIR = "/backupUT";
+ protected static String BACKUP_ROOT_DIR = Path.SEPARATOR + "backupUT";
+ protected static String BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT";
protected static String provider = "defaultProvider";
protected static boolean secure = false;
@@ -316,10 +316,14 @@ public class TestBackupBase {
conf1 = TEST_UTIL.getConfiguration();
TEST_UTIL.startMiniMapReduceCluster();
- BACKUP_ROOT_DIR = TEST_UTIL.getConfiguration().get("fs.defaultFS") + "/backupUT";
+ BACKUP_ROOT_DIR =
+ new Path ( new Path(TEST_UTIL.getConfiguration().get("fs.defaultFS")),
+ BACKUP_ROOT_DIR).toString();
LOG.info("ROOTDIR " + BACKUP_ROOT_DIR);
if (useSecondCluster) {
- BACKUP_REMOTE_ROOT_DIR = TEST_UTIL2.getConfiguration().get("fs.defaultFS") + "/backupUT";
+ BACKUP_REMOTE_ROOT_DIR =
+ new Path ( new Path(TEST_UTIL2.getConfiguration().get("fs.defaultFS")),
+ BACKUP_REMOTE_ROOT_DIR).toString();
LOG.info("REMOTE ROOTDIR " + BACKUP_REMOTE_ROOT_DIR);
}
createTables();
http://git-wip-us.apache.org/repos/asf/hbase/blob/ca295982/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
----------------------------------------------------------------------
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
index 08002fb..c9fc84f 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupCommandLineTool.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.backup;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
@@ -80,6 +81,15 @@ public class TestBackupCommandLineTool {
assertTrue(output.indexOf(USAGE_DESCRIBE) >= 0);
}
+
+ @Test
+ public void testBackupDriverCreateTopLevelBackupDest() throws Exception {
+ String[] args = new String[] { "create", "full", "hdfs://localhost:1020", "-t", "t1" };
+ int result = ToolRunner.run(conf, new BackupDriver(), args);
+ // FAILED
+ assertEquals(1, result);
+ }
+
@Test
public void testBackupDriverCreateHelp() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -419,7 +429,7 @@ public class TestBackupCommandLineTool {
public void testBackupDriverBackupSetAndList() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
System.setOut(new PrintStream(baos));
- String[] args = new String[] { "create", "full", "file:/", "-t", "clicks", "-s", "s" };
+ String[] args = new String[] { "create", "full", "file:/localhost", "-t", "clicks", "-s", "s" };
ToolRunner.run(conf, new BackupDriver(), args);
String output = baos.toString();