Posted to commits@hbase.apache.org by jm...@apache.org on 2012/08/20 23:30:40 UTC

svn commit: r1375228 - in /hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util: FSUtils.java HBaseFsck.java

Author: jmhsieh
Date: Mon Aug 20 21:30:40 2012
New Revision: 1375228

URL: http://svn.apache.org/viewvc?rev=1375228&view=rev
Log:
HBASE-5714 Add write permissions check before any hbck run that modifies hdfs (Liang Xie)

Modified:
    hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
    hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java

Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java?rev=1375228&r1=1375227&r2=1375228&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java Mon Aug 20 21:30:40 2012
@@ -40,6 +40,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -49,6 +50,8 @@ import org.apache.hadoop.hbase.master.HM
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
@@ -1028,6 +1031,42 @@ public abstract class FSUtils {
   }
 
   /**
+   * Throw an exception if an action is not permitted by a user on a file.
+   * 
+   * @param ugi
+   *          the user
+   * @param file
+   *          the file
+   * @param action
+   *          the action
+   */
+  public static void checkAccess(UserGroupInformation ugi, FileStatus file,
+      FsAction action) throws AccessControlException {
+    if (ugi.getUserName().equals(file.getOwner())) {
+      if (file.getPermission().getUserAction().implies(action)) {
+        return;
+      }
+    } else if (contains(ugi.getGroupNames(), file.getGroup())) {
+      if (file.getPermission().getGroupAction().implies(action)) {
+        return;
+      }
+    } else if (file.getPermission().getOtherAction().implies(action)) {
+      return;
+    }
+    throw new AccessControlException("Permission denied:" + " action=" + action
+        + " path=" + file.getPath() + " user=" + ugi.getUserName());
+  }
+
+  private static boolean contains(String[] groups, String user) {
+    for (String group : groups) {
+      if (group.equals(user)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
    * Calls fs.exists(). Checks if the specified path exists
    * 
    * @param fs

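The new FSUtils.checkAccess mirrors the HDFS owner/group/other permission model: it consults the owner bits when the user owns the file, the group bits when one of the user's groups matches the file's group, and otherwise the "other" bits, throwing AccessControlException when the requested FsAction is not implied. Below is a minimal sketch of calling it directly; the path and configuration are illustrative and not part of the patch (the patch itself obtains the UGI via HBase's User.getCurrent().getUGI() wrapper, while this sketch uses UserGroupInformation.getCurrentUser() for brevity):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.hbase.util.FSUtils;
    import org.apache.hadoop.security.AccessControlException;
    import org.apache.hadoop.security.UserGroupInformation;

    public class CheckAccessExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Illustrative path; any filesystem entry the caller cares about works.
        FileStatus status = fs.getFileStatus(new Path("/hbase"));
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        try {
          // Throws AccessControlException if ugi may not WRITE to the path.
          FSUtils.checkAccess(ugi, status, FsAction.WRITE);
          System.out.println("write access OK for " + ugi.getUserName());
        } catch (AccessControlException ace) {
          System.err.println(ace.getMessage());
        }
      }
    }

Note that the check is a client-side evaluation of the FileStatus permission bits, not a call into the NameNode's authorization logic.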
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java?rev=1375228&r1=1375227&r2=1375228&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java Mon Aug 20 21:30:40 2012
@@ -47,6 +47,7 @@ import org.apache.hadoop.fs.FSDataInputS
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -78,12 +79,15 @@ import org.apache.hadoop.hbase.ipc.HRegi
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
 import org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandler;
 import org.apache.hadoop.hbase.util.hbck.TableIntegrityErrorHandlerImpl;
 import org.apache.hadoop.hbase.zookeeper.RootRegionTracker;
 import org.apache.hadoop.hbase.zookeeper.ZKTable;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.zookeeper.KeeperException;
 
 import com.google.common.base.Joiner;
@@ -182,6 +186,7 @@ public class HBaseFsck {
   private boolean rerun = false; // if we tried to fix something, rerun hbck
   private static boolean summary = false; // if we want to print less output
   private boolean checkMetaOnly = false;
+  private boolean ignorePreCheckPermission = false; // if pre-check permission
 
   /*********
    * State
@@ -1192,6 +1197,27 @@ public class HBaseFsck {
     }
   }
 
+  private void preCheckPermission() throws IOException, AccessControlException {
+    if (shouldIgnorePreCheckPermission()) {
+      return;
+    }
+
+    Path hbaseDir = new Path(conf.get(HConstants.HBASE_DIR));
+    FileSystem fs = hbaseDir.getFileSystem(conf);
+    UserGroupInformation ugi = User.getCurrent().getUGI();
+    FileStatus[] files = fs.listStatus(hbaseDir);
+    for (FileStatus file : files) {
+      try {
+        FSUtils.checkAccess(ugi, file, FsAction.WRITE);
+      } catch (AccessControlException ace) {
+        LOG.warn("Got AccessControlException when preCheckPermission ", ace);
+        System.err.println("Current user " + ugi.getUserName() + " does not have write perms to " + file.getPath()
+            + ". Please rerun hbck as hdfs user " + file.getOwner());
+        throw new AccessControlException(ace);
+      }
+    }
+  }
+
   /**
    * Deletes region from meta table
    */
@@ -3017,6 +3043,14 @@ public class HBaseFsck {
     return fixSplitParents;
   }
 
+  public boolean shouldIgnorePreCheckPermission() {
+    return ignorePreCheckPermission;
+  }
+
+  public void setIgnorePreCheckPermission(boolean ignorePreCheckPermission) {
+    this.ignorePreCheckPermission = ignorePreCheckPermission;
+  }
+
   /**
    * @param mm maximum number of regions to merge into a single region.
    */
@@ -3091,6 +3125,7 @@ public class HBaseFsck {
     System.err.println("   -sidelineBigOverlaps  When fixing region overlaps, allow to sideline big overlaps");
     System.err.println("   -maxOverlapsToSideline <n>  When fixing region overlaps, allow at most <n> regions to sideline per group. (n=" + DEFAULT_OVERLAPS_TO_SIDELINE +" by default)");
     System.err.println("   -fixSplitParents  Try to force offline split parents to be online.");
+    System.err.println("   -ignorePreCheckPermission  ignore filesystem permission pre-check");
     System.err.println("");
     System.err.println("   -repair           Shortcut for -fixAssignments -fixMeta -fixHdfsHoles " +
         "-fixHdfsOrphans -fixHdfsOverlaps -fixVersionFile -sidelineBigOverlaps");
@@ -3174,6 +3209,8 @@ public class HBaseFsck {
         fsck.setSidelineBigOverlaps(true);
       } else if (cmd.equals("-fixSplitParents")) {
         fsck.setFixSplitParents(true);
+      } else if (cmd.equals("-ignorePreCheckPermission")) {
+        fsck.setIgnorePreCheckPermission(true);
       } else if (cmd.equals("-repair")) {
         // this attempts to merge overlapping hdfs regions, needs testing
         // under load
@@ -3232,6 +3269,15 @@ public class HBaseFsck {
         System.out.println("Allow checking/fixes for table: " + cmd);
       }
     }
+
+    // pre-check current user has FS write permission or not
+    try {
+      fsck.preCheckPermission();
+    } catch (AccessControlException ace) {
+      Runtime.getRuntime().exit(-1);
+    } catch (IOException ioe) {
+      Runtime.getRuntime().exit(-1);
+    }
     // do the real work of fsck
     fsck.connect();
     int code = fsck.onlineHbck();
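
With this change, hbck lists the top-level entries under hbase.rootdir and exits with -1 before doing any work if the current user cannot write to one of them; passing -ignorePreCheckPermission on the command line skips the check. The following is a hedged sketch of driving the same behavior programmatically; the constructor, connect(), and onlineHbck() calls follow the existing HBaseFsck API visible in this diff, but treat the snippet as illustrative rather than a supported entry point:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.util.HBaseFsck;

    public class HbckWithoutPreCheck {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseFsck fsck = new HBaseFsck(conf);
        // Equivalent to the new -ignorePreCheckPermission option:
        // skip the filesystem write-permission pre-check added by this patch.
        fsck.setIgnorePreCheckPermission(true);
        fsck.connect();
        int code = fsck.onlineHbck();
        System.exit(code);
      }
    }

Leaving the pre-check enabled is the safer default: it fails fast with a message naming the directory owner to rerun hbck as, instead of partially applying HDFS-modifying repairs and then hitting a permission error midway.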