Posted to commits@hbase.apache.org by op...@apache.org on 2018/10/23 02:21:32 UTC

hbase git commit: HBASE-21356 bulkLoadHFile API should ensure that rs has the source hfile's write permission

Repository: hbase
Updated Branches:
  refs/heads/master 931156f66 -> ae13b0b29


HBASE-21356 bulkLoadHFile API should ensure that rs has the source hfile's write permission


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ae13b0b2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ae13b0b2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ae13b0b2

Branch: refs/heads/master
Commit: ae13b0b2932142c39fc37e6c99b398b93d7d6c7e
Parents: 931156f
Author: huzheng <op...@gmail.com>
Authored: Mon Oct 22 18:04:38 2018 +0800
Committer: huzheng <op...@gmail.com>
Committed: Tue Oct 23 10:18:40 2018 +0800

----------------------------------------------------------------------
 .../hadoop/hbase/regionserver/HStore.java       |   6 +-
 .../security/access/TestAccessController.java   | 112 +++++++++++++------
 2 files changed, 83 insertions(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/ae13b0b2/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 6f2a6fa..032dc5f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -55,6 +55,7 @@ import java.util.stream.LongStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -783,8 +784,9 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
     try {
       LOG.info("Validating hfile at " + srcPath + " for inclusion in "
           + "store " + this + " region " + this.getRegionInfo().getRegionNameAsString());
-      reader = HFile.createReader(srcPath.getFileSystem(conf), srcPath, cacheConf,
-        isPrimaryReplicaStore(), conf);
+      FileSystem srcFs = srcPath.getFileSystem(conf);
+      srcFs.access(srcPath, FsAction.READ_WRITE);
+      reader = HFile.createReader(srcFs, srcPath, cacheConf, isPrimaryReplicaStore(), conf);
       reader.loadFileInfo();
 
       Optional<byte[]> firstKey = reader.getFirstRowKey();
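
----------------------------------------------------------------------
For context, the added srcFs.access(srcPath, FsAction.READ_WRITE) call asks the
filesystem whether the caller (the region server principal) has both read and
write access to the source hfile, so an AccessControlException surfaces during
validation instead of the bulk load failing later when the file is moved into
the store. The standalone sketch below shows the same Hadoop
FileSystem.access(Path, FsAction) pattern in isolation; the class name and the
path are illustrative and not part of this patch.

  import java.io.FileNotFoundException;
  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.fs.permission.FsAction;
  import org.apache.hadoop.security.AccessControlException;

  public class AccessCheckSketch {
    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      // Hypothetical source path; in HStore this is the hfile handed to bulk load.
      Path srcPath = new Path("/user/client/bulkload/hfile_0");
      FileSystem srcFs = srcPath.getFileSystem(conf);
      try {
        // Ask the filesystem whether the current user may both read and write the file.
        srcFs.access(srcPath, FsAction.READ_WRITE);
        System.out.println("ok: hfile is readable and writable, safe to bulk load");
      } catch (AccessControlException e) {
        // The case HBASE-21356 surfaces early: the file can be read but not moved.
        System.out.println("rejected: " + e.getMessage());
      } catch (FileNotFoundException e) {
        System.out.println("missing: " + e.getMessage());
      }
    }
  }
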

http://git-wip-us.apache.org/repos/asf/hbase/blob/ae13b0b2/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 163c2ec..78bb5f6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -155,6 +155,7 @@ public class TestAccessController extends SecureTestUtil {
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestAccessController.class);
 
+  private static final FsPermission FS_PERMISSION_ALL = FsPermission.valueOf("-rwxrwxrwx");
   private static final Logger LOG = LoggerFactory.getLogger(TestAccessController.class);
   private static TableName TEST_TABLE = TableName.valueOf("testtable1");
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -983,7 +984,7 @@ public class TestAccessController extends SecureTestUtil {
       fs.mkdirs(dir);
       // need to make it globally writable
       // so users creating HFiles have write permissions
-      fs.setPermission(dir, FsPermission.valueOf("-rwxrwxrwx"));
+      fs.setPermission(dir, FS_PERMISSION_ALL);
 
       AccessTestAction bulkLoadAction = new AccessTestAction() {
         @Override
@@ -994,9 +995,8 @@ public class TestAccessController extends SecureTestUtil {
           byte[][][] hfileRanges = { { { (byte) 0 }, { (byte) 9 } } };
 
           Path bulkLoadBasePath = new Path(dir, new Path(User.getCurrent().getName()));
-          new BulkLoadHelper(bulkLoadBasePath).bulkLoadHFile(TEST_TABLE, TEST_FAMILY,
-            TEST_QUALIFIER, hfileRanges, numRows);
-
+          new BulkLoadHelper(bulkLoadBasePath).initHFileData(TEST_FAMILY, TEST_QUALIFIER,
+            hfileRanges, numRows, FS_PERMISSION_ALL).bulkLoadHFile(TEST_TABLE);
           return null;
         }
       };
@@ -1014,6 +1014,51 @@ public class TestAccessController extends SecureTestUtil {
     }
   }
 
+  private class BulkLoadAccessTestAction implements AccessTestAction {
+    private FsPermission filePermission;
+    private Path testDataDir;
+
+    public BulkLoadAccessTestAction(FsPermission perm, Path testDataDir) {
+      this.filePermission = perm;
+      this.testDataDir = testDataDir;
+    }
+
+    @Override
+    public Object run() throws Exception {
+      FileSystem fs = TEST_UTIL.getTestFileSystem();
+      fs.mkdirs(testDataDir);
+      fs.setPermission(testDataDir, FS_PERMISSION_ALL);
+      // Making the assumption that the test table won't split between the range
+      byte[][][] hfileRanges = { { { (byte) 0 }, { (byte) 9 } } };
+      Path bulkLoadBasePath = new Path(testDataDir, new Path(User.getCurrent().getName()));
+      new BulkLoadHelper(bulkLoadBasePath)
+          .initHFileData(TEST_FAMILY, TEST_QUALIFIER, hfileRanges, 3, filePermission)
+          .bulkLoadHFile(TEST_TABLE);
+      return null;
+    }
+  }
+
+  @Test
+  public void testBulkLoadWithoutWritePermission() throws Exception {
+    // Use the USER_CREATE to initialize the source directory.
+    Path testDataDir0 = TEST_UTIL.getDataTestDirOnTestFS("testBulkLoadWithoutWritePermission0");
+    Path testDataDir1 = TEST_UTIL.getDataTestDirOnTestFS("testBulkLoadWithoutWritePermission1");
+    AccessTestAction bulkLoadAction1 =
+        new BulkLoadAccessTestAction(FsPermission.valueOf("-r-xr-xr-x"), testDataDir0);
+    AccessTestAction bulkLoadAction2 =
+        new BulkLoadAccessTestAction(FS_PERMISSION_ALL, testDataDir1);
+    // Test the incorrect case.
+    BulkLoadHelper.setPermission(TEST_UTIL.getTestFileSystem(),
+      TEST_UTIL.getTestFileSystem().getWorkingDirectory(), FS_PERMISSION_ALL);
+    try {
+      USER_CREATE.runAs(bulkLoadAction1);
+      fail("Should fail because the hbase user has no write permission on hfiles.");
+    } catch (IOException e) {
+    }
+    // Ensure the correct case.
+    USER_CREATE.runAs(bulkLoadAction2);
+  }
+
   public static class BulkLoadHelper {
     private final FileSystem fs;
     private final Path loadPath;
@@ -1029,64 +1074,65 @@ public class TestAccessController extends SecureTestUtil {
     private void createHFile(Path path,
         byte[] family, byte[] qualifier,
         byte[] startKey, byte[] endKey, int numRows) throws IOException {
-
       HFile.Writer writer = null;
       long now = System.currentTimeMillis();
       try {
         HFileContext context = new HFileContextBuilder().build();
-        writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
-            .withPath(fs, path)
-            .withFileContext(context)
-            .create();
+        writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path)
+            .withFileContext(context).create();
         // subtract 2 since numRows doesn't include boundary keys
-        for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, true, numRows-2)) {
+        for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, true, numRows - 2)) {
           KeyValue kv = new KeyValue(key, family, qualifier, now, key);
           writer.append(kv);
         }
       } finally {
-        if(writer != null)
+        if (writer != null) {
           writer.close();
+        }
       }
     }
 
-    private void bulkLoadHFile(
-        TableName tableName,
-        byte[] family,
-        byte[] qualifier,
-        byte[][][] hfileRanges,
-        int numRowsPerRange) throws Exception {
-
+    private BulkLoadHelper initHFileData(byte[] family, byte[] qualifier, byte[][][] hfileRanges,
+        int numRowsPerRange, FsPermission filePermission) throws Exception {
       Path familyDir = new Path(loadPath, Bytes.toString(family));
       fs.mkdirs(familyDir);
       int hfileIdx = 0;
+      List<Path> hfiles = new ArrayList<>();
       for (byte[][] range : hfileRanges) {
         byte[] from = range[0];
         byte[] to = range[1];
-        createHFile(new Path(familyDir, "hfile_"+(hfileIdx++)),
-            family, qualifier, from, to, numRowsPerRange);
+        Path hfile = new Path(familyDir, "hfile_" + (hfileIdx++));
+        hfiles.add(hfile);
+        createHFile(hfile, family, qualifier, from, to, numRowsPerRange);
       }
-      //set global read so RegionServer can move it
-      setPermission(loadPath, FsPermission.valueOf("-rwxrwxrwx"));
-
+      // set global read so RegionServer can move it
+      setPermission(fs, loadPath, FS_PERMISSION_ALL);
+      // Ensure the file permission as requested.
+      for (Path hfile : hfiles) {
+        setPermission(fs, hfile, filePermission);
+      }
+      return this;
+    }
 
+    private void bulkLoadHFile(TableName tableName) throws Exception {
       try (Connection conn = ConnectionFactory.createConnection(conf);
-           Admin admin = conn.getAdmin();
-           RegionLocator locator = conn.getRegionLocator(tableName);
-           Table table = conn.getTable(tableName)) {
+          Admin admin = conn.getAdmin();
+          RegionLocator locator = conn.getRegionLocator(tableName);
+          Table table = conn.getTable(tableName)) {
         TEST_UTIL.waitUntilAllRegionsAssigned(tableName);
         LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
         loader.doBulkLoad(loadPath, admin, table, locator);
       }
     }
 
-    public void setPermission(Path dir, FsPermission perm) throws IOException {
-      if(!fs.getFileStatus(dir).isDirectory()) {
-        fs.setPermission(dir,perm);
-      }
-      else {
-        for(FileStatus el : fs.listStatus(dir)) {
+    private static void setPermission(FileSystem fs, Path dir, FsPermission perm)
+        throws IOException {
+      if (!fs.getFileStatus(dir).isDirectory()) {
+        fs.setPermission(dir, perm);
+      } else {
+        for (FileStatus el : fs.listStatus(dir)) {
           fs.setPermission(el.getPath(), perm);
-          setPermission(el.getPath() , perm);
+          setPermission(fs, el.getPath(), perm);
         }
       }
     }
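
----------------------------------------------------------------------
The practical consequence for client code staging hfiles for bulk load is that
the files must now be writable (not only readable) by the region server user
before the load is requested, which is what the test's initHFileData arranges.
A minimal sketch of that client-side preparation step follows; the staging
path, class name, and the world-writable permission string are illustrative
(a narrower grant to the hbase user or group is usually preferable), and the
helper is similar in spirit to the test's setPermission rather than a copy of it.

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.fs.permission.FsPermission;

  public class PrepareBulkLoadPermissions {

    // Recursively open up the staged directory and hfiles so the region server
    // can read them and then move (i.e. write) them, as required since HBASE-21356.
    static void grantAll(FileSystem fs, Path path, FsPermission perm) throws IOException {
      fs.setPermission(path, perm);
      if (fs.getFileStatus(path).isDirectory()) {
        for (FileStatus child : fs.listStatus(path)) {
          grantAll(fs, child.getPath(), perm);
        }
      }
    }

    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      // Hypothetical staging directory produced by an hfile-writing job.
      Path loadPath = new Path("/user/client/bulkload");
      FileSystem fs = loadPath.getFileSystem(conf);
      grantAll(fs, loadPath, FsPermission.valueOf("-rwxrwxrwx"));
    }
  }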