Posted to commits@hive.apache.org by jd...@apache.org on 2014/08/07 00:14:05 UTC
svn commit: r1616364 - in /hive/trunk:
common/src/java/org/apache/hadoop/hive/common/
itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/
ql/src/java/org/apache/h...
Author: jdere
Date: Wed Aug 6 22:14:05 2014
New Revision: 1616364
URL: http://svn.apache.org/r1616364
Log:
HIVE-7583: Use FileSystem.access() if available to check file access for user (Jason Dere, reviewed by Thejas Nair)
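The approach, roughly: when the FileSystem class in the Hadoop build exposes access(), delegate the permission check to it (on HDFS this is evaluated server-side and is ACL-aware); otherwise fall back to a client-side owner/group/other check. A minimal sketch of that pattern, assuming fs and path variables in scope (the real probe lives in Hadoop23Shims and the fallback in the new DefaultFileAccess class, both below):

    // Probe once for the optional FileSystem.access(Path, FsAction) API.
    Method access = null;
    try {
      access = FileSystem.class.getMethod("access", Path.class, FsAction.class);
    } catch (NoSuchMethodException ignore) {
      // This Hadoop version predates FileSystem.access().
    }
    if (access != null) {
      access.invoke(fs, path, FsAction.WRITE);  // NameNode-side, ACL-aware check
    } else {
      // Client-side fallback: compare permission bits from the FileStatus.
      DefaultFileAccess.checkFileAccess(fs, fs.getFileStatus(path), FsAction.WRITE);
    }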
Added:
hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java
hive/trunk/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Wed Aug 6 22:14:05 2014
@@ -22,6 +22,8 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
+import java.security.AccessControlException;
+import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
@@ -352,35 +354,47 @@ public final class FileUtils {
}
/**
- * Check if the given FileStatus indicates that the action is allowed for
- * userName. It checks the group and other permissions also to determine this.
- *
- * @param userName
- * @param fsStatus
- * @param action
- * @return true if it is writable for userName
- */
- public static boolean isActionPermittedForUser(String userName, FileStatus fsStatus, FsAction action) {
- FsPermission permissions = fsStatus.getPermission();
- // check user perm
- if (fsStatus.getOwner().equals(userName)
- && permissions.getUserAction().implies(action)) {
- return true;
- }
- // check other perm
- if (permissions.getOtherAction().implies(action)) {
- return true;
- }
- // check group perm after ensuring user belongs to the file owner group
- String fileGroup = fsStatus.getGroup();
- String[] userGroups = UserGroupInformation.createRemoteUser(userName).getGroupNames();
- for (String group : userGroups) {
- if (group.equals(fileGroup)) {
- // user belongs to the file group
- return permissions.getGroupAction().implies(action);
+ * Perform a check to determine if the user is able to access the file passed in.
+ * If the user name passed in is different from the current user, this method will
+ * attempt to impersonate the user to do the check; the current user should be
+ * able to create proxy users in this case.
+ * @param fs FileSystem of the path to check
+ * @param stat FileStatus representing the file
+ * @param action FsAction that will be checked
+ * @param user User name of the user that will be checked for access. If the user name
+ * is null or the same as the current user, no user impersonation will be done
+ * and the check will be done as the current user. Otherwise the file access
+ * check will be performed within a doAs() block to use the access privileges
+ * of this user. In this case the user must be configured to impersonate other
+ * users, otherwise this check will fail with an error.
+ * @param groups List of groups for the user
+ * @throws IOException
+ * @throws AccessControlException
+ * @throws InterruptedException
+ * @throws Exception
+ */
+ public static void checkFileAccessWithImpersonation(final FileSystem fs,
+ final FileStatus stat, final FsAction action, final String user)
+ throws IOException, AccessControlException, InterruptedException, Exception {
+ UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(fs.getConf());
+ String currentUser = ShimLoader.getHadoopShims().getShortUserName(ugi);
+
+ if (user == null || currentUser.equals(user)) {
+ // No need to impersonate user, do the checks as the currently configured user.
+ ShimLoader.getHadoopShims().checkFileAccess(fs, stat, action);
+ return;
+ }
+
+ // Otherwise, try user impersonation. Current user must be configured to do user impersonation.
+ UserGroupInformation proxyUser = ShimLoader.getHadoopShims().createProxyUser(user);
+ ShimLoader.getHadoopShims().doAs(proxyUser, new PrivilegedExceptionAction<Object>() {
+ @Override
+ public Object run() throws Exception {
+ FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf());
+ ShimLoader.getHadoopShims().checkFileAccess(fsAsUser, stat, action);
+ return null;
}
- }
- return false;
+ });
}
/**
@@ -395,7 +409,7 @@ public final class FileUtils {
* @throws IOException
*/
public static boolean isActionPermittedForFileHierarchy(FileSystem fs, FileStatus fileStatus,
- String userName, FsAction action) throws IOException {
+ String userName, FsAction action) throws Exception {
boolean isDir = fileStatus.isDir();
FsAction dirActionNeeded = action;
@@ -403,7 +417,11 @@ public final class FileUtils {
// for dirs user needs execute privileges as well
dirActionNeeded.and(FsAction.EXECUTE);
}
- if (!isActionPermittedForUser(userName, fileStatus, dirActionNeeded)) {
+
+ try {
+ checkFileAccessWithImpersonation(fs, fileStatus, action, userName);
+ } catch (AccessControlException err) {
+ // Action not permitted for user
return false;
}
Added: hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java?rev=1616364&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java (added)
+++ hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java Wed Aug 6 22:14:05 2014
@@ -0,0 +1,198 @@
+package org.apache.hadoop.hive.ql.security;
+
+import static org.apache.hadoop.fs.permission.AclEntryScope.ACCESS;
+import static org.apache.hadoop.fs.permission.AclEntryType.GROUP;
+import static org.apache.hadoop.fs.permission.AclEntryType.OTHER;
+import static org.apache.hadoop.fs.permission.AclEntryType.USER;
+
+import java.lang.reflect.Method;
+import java.net.URI;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import com.google.common.collect.Lists;
+
+public class TestStorageBasedMetastoreAuthorizationProviderWithACL
+ extends TestStorageBasedMetastoreAuthorizationProvider {
+
+ protected static MiniDFSShim dfs = null;
+ protected static Path warehouseDir = null;
+ protected UserGroupInformation userUgi = null;
+ protected String testUserName = "test_user";
+
+
+ @Override
+ protected boolean isTestEnabled() {
+ // This test with HDFS ACLs will only work if FileSystem.access() is available in the
+ // version of hadoop-2 used to build Hive.
+ return doesAccessAPIExist();
+ }
+
+ private static boolean doesAccessAPIExist() {
+ boolean foundMethod = false;
+ try {
+ Method method = FileSystem.class.getMethod("access", Path.class, FsAction.class);
+ foundMethod = true;
+ } catch (NoSuchMethodException err) {
+ }
+ return foundMethod;
+ }
+
+ @Override
+ protected HiveConf createHiveConf() throws Exception {
+ userUgi = UserGroupInformation.createUserForTesting(testUserName, new String[] {});
+
+ // Hadoop FS ACLs do not work with LocalFileSystem, so set up MiniDFS.
+ HiveConf conf = super.createHiveConf();
+ String currentUserName = ShimLoader.getHadoopShims().getUGIForConf(conf).getShortUserName();
+ conf.set("dfs.namenode.acls.enabled", "true");
+ conf.set("hadoop.proxyuser." + currentUserName + ".groups", "*");
+ conf.set("hadoop.proxyuser." + currentUserName + ".hosts", "*");
+ dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
+ FileSystem fs = dfs.getFileSystem();
+
+ warehouseDir = new Path(new Path(fs.getUri()), "/warehouse");
+ fs.mkdirs(warehouseDir);
+ conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());
+ conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
+
+ return conf;
+ }
+
+ protected String setupUser() {
+ // Using MiniDFS, the permissions don't work properly because
+ // the current user gets treated as a superuser.
+ // For this test, specify a different (non-super) user.
+ InjectableDummyAuthenticator.injectUserName(userUgi.getShortUserName());
+ InjectableDummyAuthenticator.injectGroupNames(Arrays.asList(userUgi.getGroupNames()));
+ InjectableDummyAuthenticator.injectMode(true);
+ return userUgi.getShortUserName();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+
+ if (dfs != null) {
+ dfs.shutdown();
+ dfs = null;
+ }
+ }
+
+ protected void allowWriteAccessViaAcl(String userName, String location)
+ throws Exception {
+ // Set the FS perms to read-only access, and create ACL entries allowing write access.
+ List<AclEntry> aclSpec = Lists.newArrayList(
+ aclEntry(ACCESS, USER, FsAction.READ_EXECUTE),
+ aclEntry(ACCESS, GROUP, FsAction.READ_EXECUTE),
+ aclEntry(ACCESS, OTHER, FsAction.READ_EXECUTE),
+ aclEntry(ACCESS, USER, userName, FsAction.ALL)
+ );
+ FileSystem fs = FileSystem.get(new URI(location), clientHiveConf);
+ fs.setAcl(new Path(location), aclSpec);
+ }
+
+ protected void disallowWriteAccessViaAcl(String userName, String location)
+ throws Exception {
+ FileSystem fs = FileSystem.get(new URI(location), clientHiveConf);
+ fs.removeAcl(new Path(location));
+ setPermissions(location,"-r-xr-xr-x");
+ }
+
+ /**
+ * Create a new AclEntry with scope, type and permission (no name).
+ * Borrowed from TestExtendedAcls
+ *
+ * @param scope
+ * AclEntryScope scope of the ACL entry
+ * @param type
+ * AclEntryType ACL entry type
+ * @param permission
+ * FsAction set of permissions in the ACL entry
+ * @return AclEntry new AclEntry
+ */
+ private AclEntry aclEntry(AclEntryScope scope, AclEntryType type,
+ FsAction permission) {
+ return new AclEntry.Builder().setScope(scope).setType(type)
+ .setPermission(permission).build();
+ }
+
+ /**
+ * Create a new AclEntry with scope, type, name and permission.
+ * Borrowed from TestExtendedAcls
+ *
+ * @param scope
+ * AclEntryScope scope of the ACL entry
+ * @param type
+ * AclEntryType ACL entry type
+ * @param name
+ * String optional ACL entry name
+ * @param permission
+ * FsAction set of permissions in the ACL entry
+ * @return AclEntry new AclEntry
+ */
+ private AclEntry aclEntry(AclEntryScope scope, AclEntryType type,
+ String name, FsAction permission) {
+ return new AclEntry.Builder().setScope(scope).setType(type).setName(name)
+ .setPermission(permission).build();
+ }
+
+ protected void allowCreateDatabase(String userName)
+ throws Exception {
+ allowWriteAccessViaAcl(userName, warehouseDir.toString());
+ }
+
+ protected void disallowCreateDatabase(String userName)
+ throws Exception {
+ disallowWriteAccessViaAcl(userName, warehouseDir.toString());
+ }
+
+ @Override
+ protected void allowCreateInDb(String dbName, String userName, String location)
+ throws Exception {
+ allowWriteAccessViaAcl(userName, location);
+ }
+
+ @Override
+ protected void disallowCreateInDb(String dbName, String userName, String location)
+ throws Exception {
+ disallowWriteAccessViaAcl(userName, location);
+ }
+
+ @Override
+ protected void allowCreateInTbl(String tableName, String userName, String location)
+ throws Exception {
+ allowWriteAccessViaAcl(userName, location);
+ }
+
+
+ @Override
+ protected void disallowCreateInTbl(String tableName, String userName, String location)
+ throws Exception {
+ disallowWriteAccessViaAcl(userName, location);
+ }
+
+ @Override
+ protected void allowDropOnTable(String tblName, String userName, String location)
+ throws Exception {
+ allowWriteAccessViaAcl(userName, location);
+ }
+
+ @Override
+ protected void allowDropOnDb(String dbName, String userName, String location)
+ throws Exception {
+ allowWriteAccessViaAcl(userName, location);
+ }
+}
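The impersonation path exercised by this test only works when the current (service) user is allowed to proxy other users, which is what the hadoop.proxyuser.* settings in createHiveConf() above grant. A sketch of the same two properties as they would appear outside the test, with "hive" standing in for the actual service user name (in a real cluster these live in core-site.xml):

    conf.set("hadoop.proxyuser.hive.groups", "*");  // groups whose members hive may impersonate
    conf.set("hadoop.proxyuser.hive.hosts", "*");   // hosts hive may impersonate from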
Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java Wed Aug 6 22:14:05 2014
@@ -72,6 +72,9 @@ public class TestMetastoreAuthorizationP
return DefaultHiveMetastoreAuthorizationProvider.class.getName();
}
+ protected HiveConf createHiveConf() throws Exception {
+ return new HiveConf(this.getClass());
+ }
@Override
protected void setUp() throws Exception {
@@ -92,7 +95,7 @@ public class TestMetastoreAuthorizationP
MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
- clientHiveConf = new HiveConf(this.getClass());
+ clientHiveConf = createHiveConf();
// Turn off client-side authorization
clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,false);
@@ -134,10 +137,23 @@ public class TestMetastoreAuthorizationP
return "smp_ms_tbl";
}
+ protected boolean isTestEnabled() {
+ return true;
+ }
+
+ protected String setupUser() {
+ return ugi.getUserName();
+ }
+
public void testSimplePrivileges() throws Exception {
+ if (!isTestEnabled()) {
+ System.out.println("Skipping test " + this.getClass().getName());
+ return;
+ }
+
String dbName = getTestDbName();
String tblName = getTestTableName();
- String userName = ugi.getUserName();
+ String userName = setupUser();
allowCreateDatabase(userName);
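The two new hooks make testSimplePrivileges() a template method: a subclass can switch the test off when a required API is missing and substitute a different test user. A minimal hypothetical subclass sketch (the class name and the onHadoop2 flag are illustrative only; the real example is TestStorageBasedMetastoreAuthorizationProviderWithACL above):

    public class MyAuthorizationProviderTest extends TestMetastoreAuthorizationProvider {
      @Override
      protected boolean isTestEnabled() {
        return onHadoop2;  // hypothetical flag: skip unless the needed API exists
      }
      @Override
      protected String setupUser() {
        return "proxy_test_user";  // run the privilege checks as this user
      }
    }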
Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java Wed Aug 6 22:14:05 2014
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.security;
import java.net.URI;
+import java.security.AccessControlException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -81,7 +82,7 @@ public class TestStorageBasedMetastoreAu
setPermissions(location,"-rwxr--r--");
}
- private void setPermissions(String locn, String permissions) throws Exception {
+ protected void setPermissions(String locn, String permissions) throws Exception {
FileSystem fs = FileSystem.get(new URI(locn), clientHiveConf);
fs.setPermission(new Path(locn), FsPermission.valueOf(permissions));
}
@@ -89,7 +90,7 @@ public class TestStorageBasedMetastoreAu
@Override
protected void assertNoPrivileges(MetaException me){
assertNotNull(me);
- assertTrue(me.getMessage().indexOf("not permitted") != -1);
+ assertTrue(me.getMessage().indexOf("AccessControlException") != -1);
}
@Override
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java Wed Aug 6 22:14:05 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.securi
import java.io.FileNotFoundException;
import java.io.IOException;
import java.security.AccessControlException;
+import java.security.PrivilegedExceptionAction;
import java.util.EnumSet;
import java.util.List;
@@ -35,6 +36,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Database;
@@ -44,6 +48,7 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.shims.ShimLoader;
/**
* StorageBasedAuthorizationProvider is an implementation of
@@ -288,7 +293,7 @@ public class StorageBasedAuthorizationPr
* If the given path does not exist, it checks for its parent folder.
*/
protected void checkPermissions(final Configuration conf, final Path path,
- final EnumSet<FsAction> actions) throws IOException, LoginException {
+ final EnumSet<FsAction> actions) throws IOException, LoginException, HiveException {
if (path == null) {
throw new IllegalArgumentException("path is null");
@@ -297,8 +302,7 @@ public class StorageBasedAuthorizationPr
final FileSystem fs = path.getFileSystem(conf);
if (fs.exists(path)) {
- checkPermissions(fs, path, actions,
- authenticator.getUserName(), authenticator.getGroupNames());
+ checkPermissions(fs, path, actions, authenticator.getUserName());
} else if (path.getParent() != null) {
// find the ancestor which exists to check its permissions
Path par = path.getParent();
@@ -309,8 +313,7 @@ public class StorageBasedAuthorizationPr
par = par.getParent();
}
- checkPermissions(fs, par, actions,
- authenticator.getUserName(), authenticator.getGroupNames());
+ checkPermissions(fs, par, actions, authenticator.getUserName());
}
}
@@ -320,56 +323,23 @@ public class StorageBasedAuthorizationPr
*/
@SuppressWarnings("deprecation")
protected static void checkPermissions(final FileSystem fs, final Path path,
- final EnumSet<FsAction> actions, String user, List<String> groups) throws IOException,
- AccessControlException {
-
- String superGroupName = getSuperGroupName(fs.getConf());
- if (userBelongsToSuperGroup(superGroupName, groups)) {
- LOG.info("User \"" + user + "\" belongs to super-group \"" + superGroupName + "\". " +
- "Permission granted for actions: (" + actions + ").");
- return;
- }
-
- final FileStatus stat;
+ final EnumSet<FsAction> actions, String user) throws IOException,
+ AccessControlException, HiveException {
try {
- stat = fs.getFileStatus(path);
+ FileStatus stat = fs.getFileStatus(path);
+ for (FsAction action : actions) {
+ FileUtils.checkFileAccessWithImpersonation(fs, stat, action, user);
+ }
} catch (FileNotFoundException fnfe) {
// File named by path doesn't exist; nothing to validate.
return;
} catch (org.apache.hadoop.fs.permission.AccessControlException ace) {
// Older hadoop version will throw this @deprecated Exception.
throw accessControlException(ace);
+ } catch (Exception err) {
+ throw new HiveException(err);
}
-
- final FsPermission dirPerms = stat.getPermission();
- final String grp = stat.getGroup();
-
- for (FsAction action : actions) {
- if (user.equals(stat.getOwner())) {
- if (dirPerms.getUserAction().implies(action)) {
- continue;
- }
- }
- if (groups.contains(grp)) {
- if (dirPerms.getGroupAction().implies(action)) {
- continue;
- }
- }
- if (dirPerms.getOtherAction().implies(action)) {
- continue;
- }
- throw new AccessControlException("action " + action + " not permitted on path "
- + path + " for user " + user);
- }
- }
-
- private static String getSuperGroupName(Configuration configuration) {
- return configuration.get(DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_KEY, "");
- }
-
- private static boolean userBelongsToSuperGroup(String superGroupName, List<String> groups) {
- return groups.contains(superGroupName);
}
protected Path getDbLocation(Database db) throws HiveException {
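With this change, checkPermissions() no longer re-implements owner/group/other logic; it defers to FileUtils.checkFileAccessWithImpersonation() and hence to the shim layer. A hedged sketch of an internal call site (path is hypothetical; conf and the authenticator are fields of the provider):

    // Verify the authenticated user may write to a location, e.g. before a drop:
    checkPermissions(conf, path, EnumSet.of(FsAction.WRITE));
    // Denial surfaces as an AccessControlException from the shim-level check.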
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java Wed Aug 6 22:14:05 2014
@@ -394,7 +394,7 @@ public class SQLAuthorizationUtils {
if (FileUtils.isActionPermittedForFileHierarchy(fs, fileStatus, userName, FsAction.READ)) {
availPrivs.addPrivilege(SQLPrivTypeGrant.SELECT_NOGRANT);
}
- } catch (IOException e) {
+ } catch (Exception e) {
String msg = "Error getting permissions for " + filePath + ": " + e.getMessage();
throw new HiveAuthzPluginException(msg, e);
}
Modified: hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Wed Aug 6 22:14:05 2014
@@ -25,6 +25,7 @@ import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
+import java.security.AccessControlException;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
@@ -43,6 +44,7 @@ import javax.security.auth.login.LoginEx
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.DefaultFileAccess;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -52,6 +54,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.ProxyFileSystem;
import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
import org.apache.hadoop.io.LongWritable;
@@ -880,4 +883,10 @@ public class Hadoop20Shims implements Ha
LOG.debug(ArrayUtils.toString(command));
shell.run(command);
}
+
+ @Override
+ public void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
+ throws IOException, AccessControlException, Exception {
+ DefaultFileAccess.checkFileAccess(fs, stat, action);
+ }
}
Modified: hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Wed Aug 6 22:14:05 2014
@@ -19,10 +19,15 @@ package org.apache.hadoop.hive.shims;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URI;
+import java.security.AccessControlException;
+import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
@@ -32,6 +37,7 @@ import org.apache.commons.lang.ArrayUtil
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.DefaultFileAccess;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -651,6 +657,24 @@ public class Hadoop23Shims extends Hadoo
}
};
}
+
+ /**
+ * Proxy file system also needs to override the access() method behavior.
+ * Cannot add Override annotation since FileSystem.access() may not exist in
+ * the version of hadoop used to build Hive.
+ */
+ public void access(Path path, FsAction action) throws AccessControlException,
+ FileNotFoundException, IOException, Exception {
+ Path underlyingFsPath = swizzleParamPath(path);
+ FileStatus underlyingFsStatus = fs.getFileStatus(underlyingFsPath);
+ if (accessMethod != null) {
+ accessMethod.invoke(fs, underlyingFsPath, action);
+ } else {
+ // If the FS has no access() method, we can try DefaultFileAccess ..
+ UserGroupInformation ugi = getUGIForConf(getConf());
+ DefaultFileAccess.checkFileAccess(fs, underlyingFsStatus, action);
+ }
+ }
}
@Override
@@ -709,4 +733,50 @@ public class Hadoop23Shims extends Hadoo
public void getMergedCredentials(JobConf jobConf) throws IOException {
jobConf.getCredentials().mergeAll(UserGroupInformation.getCurrentUser().getCredentials());
}
+
+ protected static final Method accessMethod;
+
+ static {
+ Method m = null;
+ try {
+ m = FileSystem.class.getMethod("access", Path.class, FsAction.class);
+ } catch (NoSuchMethodException err) {
+ // This version of Hadoop does not support FileSystem.access().
+ }
+ accessMethod = m;
+ }
+
+ @Override
+ public void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
+ throws IOException, AccessControlException, Exception {
+ try {
+ if (accessMethod == null) {
+ // Have to rely on Hive implementation of filesystem permission checks.
+ DefaultFileAccess.checkFileAccess(fs, stat, action);
+ } else {
+ accessMethod.invoke(fs, stat.getPath(), action);
+ }
+ } catch (Exception err) {
+ throw wrapAccessException(err);
+ }
+ }
+
+ /**
+ * If there is an AccessControlException buried somewhere in the chain of failures, wrap the
+ * original exception in an AccessControlException. Otherwise just return the original exception.
+ */
+ private static Exception wrapAccessException(Exception err) {
+ final int maxDepth = 20;
+ Throwable curErr = err;
+ for (int idx = 0; curErr != null && idx < maxDepth; ++idx) {
+ if (curErr instanceof org.apache.hadoop.security.AccessControlException
+ || curErr instanceof org.apache.hadoop.fs.permission.AccessControlException) {
+ Exception newErr = new AccessControlException(curErr.getMessage());
+ newErr.initCause(err);
+ return newErr;
+ }
+ curErr = curErr.getCause();
+ }
+ return err;
+ }
}
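Since accessMethod is invoked reflectively, a denial from FileSystem.access() arrives wrapped in an InvocationTargetException; wrapAccessException() walks up to 20 links of the cause chain, and if it finds either of Hadoop's AccessControlException types it re-throws as java.security.AccessControlException, so callers see one stable type on both code paths. A hedged caller-side sketch (the enclosing method must still declare the broad throws Exception):

    try {
      ShimLoader.getHadoopShims().checkFileAccess(fs, stat, FsAction.READ);
    } catch (java.security.AccessControlException ace) {
      // Denied: the same catch works whether access() ran or DefaultFileAccess did.
    }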
Modified: hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Wed Aug 6 22:14:05 2014
@@ -24,6 +24,7 @@ import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.URI;
import java.net.URISyntaxException;
+import java.security.AccessControlException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
@@ -32,14 +33,19 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import javax.security.auth.login.LoginException;
+
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DefaultFileAccess;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
@@ -663,4 +669,10 @@ public abstract class HadoopShimsSecure
Collections.addAll(dedup, locations);
return dedup.toArray(new String[dedup.size()]);
}
+
+ @Override
+ public void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
+ throws IOException, AccessControlException, Exception {
+ DefaultFileAccess.checkFileAccess(fs, stat, action);
+ }
}
Added: hive/trunk/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java?rev=1616364&view=auto
==============================================================================
--- hive/trunk/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java (added)
+++ hive/trunk/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java Wed Aug 6 22:14:05 2014
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.security.AccessControlException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.List;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * Implements the default file access logic for HadoopShims.checkFileAccess(), for Hadoop
+ * versions which do not implement FileSystem.access().
+ *
+ */
+public class DefaultFileAccess {
+
+ private static Log LOG = LogFactory.getLog(DefaultFileAccess.class);
+
+ private static List<String> emptyGroups = new ArrayList<String>(0);
+
+ public static void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
+ throws IOException, AccessControlException, LoginException {
+ // Get the user/groups for checking permissions based on the current UGI.
+ UserGroupInformation currentUgi = ShimLoader.getHadoopShims().getUGIForConf(fs.getConf());
+ DefaultFileAccess.checkFileAccess(fs, stat, action,
+ ShimLoader.getHadoopShims().getShortUserName(currentUgi),
+ Arrays.asList(currentUgi.getGroupNames()));
+ }
+
+ public static void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action,
+ String user, List<String> groups) throws IOException, AccessControlException {
+
+ if (groups == null) {
+ groups = emptyGroups;
+ }
+
+ String superGroupName = getSuperGroupName(fs.getConf());
+ if (userBelongsToSuperGroup(superGroupName, groups)) {
+ LOG.info("User \"" + user + "\" belongs to super-group \"" + superGroupName + "\". " +
+ "Permission granted for action: " + action + ".");
+ return;
+ }
+
+ final FsPermission dirPerms = stat.getPermission();
+ final String grp = stat.getGroup();
+
+ if (user.equals(stat.getOwner())) {
+ if (dirPerms.getUserAction().implies(action)) {
+ return;
+ }
+ } else if (groups.contains(grp)) {
+ if (dirPerms.getGroupAction().implies(action)) {
+ return;
+ }
+ } else if (dirPerms.getOtherAction().implies(action)) {
+ return;
+ }
+ throw new AccessControlException("action " + action + " not permitted on path "
+ + stat.getPath() + " for user " + user);
+ }
+
+ private static String getSuperGroupName(Configuration configuration) {
+ return configuration.get("dfs.permissions.supergroup", "");
+ }
+
+ private static boolean userBelongsToSuperGroup(String superGroupName, List<String> groups) {
+ return groups.contains(superGroupName);
+ }
+}
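The fallback consults only the most specific permission class: the else-if chain means an owner who lacks the bit is denied even if the group or other bits would allow it, matching POSIX/HDFS semantics. The actual bit test is FsAction.implies(); a tiny worked example of those semantics (values chosen purely for illustration):

    FsPermission perms = FsPermission.valueOf("-rwxr-x---"); // owner rwx, group r-x, other ---
    perms.getUserAction().implies(FsAction.WRITE);   // true:  owner may write
    perms.getGroupAction().implies(FsAction.WRITE);  // false: group members may not write
    perms.getOtherAction().implies(FsAction.READ);   // false: everyone else may not even read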
Modified: hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1616364&r1=1616363&r2=1616364&view=diff
==============================================================================
--- hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Wed Aug 6 22:14:05 2014
@@ -25,6 +25,7 @@ import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
+import java.security.AccessControlException;
import java.security.PrivilegedExceptionAction;
import java.util.Comparator;
import java.util.List;
@@ -42,6 +43,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.InputSplit;
@@ -669,4 +671,18 @@ public interface HadoopShims {
public void getMergedCredentials(JobConf jobConf) throws IOException;
+ /**
+ * Check if the configured UGI has access to the path for the given file system action.
+ * Method will return successfully if action is permitted. AccessControlException will
+ * be thrown if user does not have access to perform the action. Other exceptions may
+ * be thrown for non-access related errors.
+ * @param fs
+ * @param status
+ * @param action
+ * @throws IOException
+ * @throws AccessControlException
+ * @throws Exception
+ */
+ public void checkFileAccess(FileSystem fs, FileStatus status, FsAction action)
+ throws IOException, AccessControlException, Exception;
}