Posted to commits@hbase.apache.org by ec...@apache.org on 2015/10/17 00:20:55 UTC

hbase git commit: HBASE-14597 Fix Groups cache in multi-threaded env

Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 e12e0e424 -> 190bdbf45


HBASE-14597 Fix Groups cache in multi-threaded env


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/190bdbf4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/190bdbf4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/190bdbf4

Branch: refs/heads/branch-1.2
Commit: 190bdbf45604bb0fc4136193e36dfe2d91075d16
Parents: e12e0e4
Author: Elliott Clark <ec...@apache.org>
Authored: Tue Oct 13 08:30:12 2015 -0700
Committer: Elliott Clark <ec...@apache.org>
Committed: Fri Oct 16 15:19:42 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/security/User.java  | 44 +++++++++++-
 .../hadoop/hbase/security/UserProvider.java     | 40 ++++++++---
 .../apache/hadoop/hbase/security/TestUser.java  | 74 ++++++++++++++++++--
 3 files changed, 142 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
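
For context, the heart of the change is switching the per-UserProvider group cache from being keyed on UserGroupInformation instances to being keyed on the user's short name, resolved through a shared Hadoop Groups service. A minimal sketch of that idea (not the patch itself; the timeout value and the Groups wiring below are illustrative assumptions):

import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Groups;

public class GroupCacheSketch {
  // Shared Groups service, as UserProvider holds after the patch.
  private static final Groups GROUPS =
      Groups.getUserToGroupsMappingService(new Configuration());

  // Keyed on the user's short name (a String), not on a UGI instance.
  private final LoadingCache<String, String[]> groupCache = CacheBuilder.newBuilder()
      .refreshAfterWrite(300, TimeUnit.SECONDS)  // illustrative; the patch reads this from conf
      .build(new CacheLoader<String, String[]>() {
        @Override
        public String[] load(String shortName) throws Exception {
          return GROUPS.getGroups(shortName).toArray(new String[0]);
        }
      });

  public String[] getGroupNames(String shortName) {
    try {
      return groupCache.get(shortName);
    } catch (Exception e) {
      return new String[0];
    }
  }
}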


http://git-wip-us.apache.org/repos/asf/hbase/blob/190bdbf4/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
index 44197db..223aada 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java
@@ -23,7 +23,11 @@ import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
 import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.ExecutionException;
 import com.google.common.cache.LoadingCache;
 import org.apache.hadoop.conf.Configuration;
@@ -32,6 +36,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Methods;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -282,7 +287,7 @@ public abstract class User {
   @InterfaceAudience.Private
    public static final class SecureHadoopUser extends User {
     private String shortName;
-    private LoadingCache<UserGroupInformation, String[]> cache;
+    private LoadingCache<String, String[]> cache;
 
     public SecureHadoopUser() throws IOException {
       ugi = UserGroupInformation.getCurrentUser();
@@ -295,7 +300,7 @@ public abstract class User {
     }
 
     public SecureHadoopUser(UserGroupInformation ugi,
-                            LoadingCache<UserGroupInformation, String[]> cache) {
+                            LoadingCache<String, String[]> cache) {
       this.ugi = ugi;
       this.cache = cache;
     }
@@ -316,7 +321,7 @@ public abstract class User {
     public String[] getGroupNames() {
       if (cache != null) {
         try {
-          return this.cache.get(ugi);
+          return this.cache.get(getShortName());
         } catch (ExecutionException e) {
           return new String[0];
         }
@@ -387,6 +392,13 @@ public abstract class User {
     /** @see User#createUserForTesting(org.apache.hadoop.conf.Configuration, String, String[]) */
     public static User createUserForTesting(Configuration conf,
         String name, String[] groups) {
+      synchronized (UserProvider.class) {
+        if (!(UserProvider.groups instanceof TestingGroups)) {
+          UserProvider.groups = new TestingGroups(UserProvider.groups);
+        }
+      }
+
+      ((TestingGroups)UserProvider.groups).setUserGroups(name, groups);
       return new SecureHadoopUser(UserGroupInformation.createUserForTesting(name, groups));
     }
 
@@ -416,4 +428,30 @@ public abstract class User {
       return UserGroupInformation.isSecurityEnabled();
     }
   }
+
+  static class TestingGroups extends Groups {
+    private final Map<String, List<String>> userToGroupsMapping =
+        new HashMap<String,List<String>>();
+    private Groups underlyingImplementation;
+
+    TestingGroups(Groups underlyingImplementation) {
+      super(new Configuration());
+      this.underlyingImplementation = underlyingImplementation;
+    }
+
+    @Override
+    public List<String> getGroups(String user) throws IOException {
+      List<String> result = userToGroupsMapping.get(user);
+
+      if (result == null) {
+        result = underlyingImplementation.getGroups(user);
+      }
+
+      return result;
+    }
+
+    private void setUserGroups(String user, String[] groups) {
+      userToGroupsMapping.put(user, Arrays.asList(groups));
+    }
+  }
 }

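The key change in User.java is that getGroupNames() now looks the cache up by the user's short name instead of by the UGI object. A quick illustration of why the old key was unreliable, assuming Hadoop's UserGroupInformation, whose equality is tied to the underlying Subject instance rather than the user name:

import org.apache.hadoop.security.UserGroupInformation;

public class UgiCacheKeySketch {
  public static void main(String[] args) {
    // Two UGIs for the same user are distinct objects, so a cache keyed on the
    // UGI itself would miss even though the user is identical.
    UserGroupInformation a = UserGroupInformation.createRemoteUser("alice");
    UserGroupInformation b = UserGroupInformation.createRemoteUser("alice");
    System.out.println(a.equals(b));              // expected: false
    System.out.println(a.getShortUserName()
        .equals(b.getShortUserName()));           // true, a stable cache key
  }
}
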
http://git-wip-us.apache.org/repos/asf/hbase/blob/190bdbf4/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
index 33b8a94..7e6de4a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hbase.security;
 
 import java.io.IOException;
+import java.util.LinkedHashSet;
+import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -33,6 +35,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hbase.BaseConfigurable;
+import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -48,12 +51,20 @@ public class UserProvider extends BaseConfigurable {
           1,
           new ThreadFactoryBuilder().setDaemon(true).setNameFormat("group-cache-%d").build()));
 
-  private LoadingCache<UserGroupInformation, String[]> groupCache = null;
+  private LoadingCache<String, String[]> groupCache = null;
 
+  static Groups groups = Groups.getUserToGroupsMappingService();
 
   @Override
-  public void setConf(Configuration conf) {
+  public void setConf(final Configuration conf) {
     super.setConf(conf);
+
+    synchronized (UserProvider.class) {
+      if (!(groups instanceof User.TestingGroups)) {
+        groups = Groups.getUserToGroupsMappingService(conf);
+      }
+    }
+
     long cacheTimeout =
         getConf().getLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS,
             CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS_DEFAULT) * 1000;
@@ -67,21 +78,34 @@ public class UserProvider extends BaseConfigurable {
         .concurrencyLevel(20)
             // create the loader
             // This just delegates to UGI.
-        .build(new CacheLoader<UserGroupInformation, String[]>() {
+        .build(new CacheLoader<String, String[]>() {
+
+          // Since UGIs don't hash based on the user id,
+          // the cache needs to be keyed on the same thing that Hadoop's Groups class
+          // uses. So this cache uses the short name.
           @Override
-          public String[] load(UserGroupInformation ugi) throws Exception {
-            return ugi.getGroupNames();
+          public String[] load(String ugi) throws Exception {
+            return getGroupStrings(ugi);
+          }
+
+          private String[] getGroupStrings(String ugi) {
+            try {
+              Set<String> result = new LinkedHashSet<String>(groups.getGroups(ugi));
+              return result.toArray(new String[result.size()]);
+            } catch (Exception e) {
+              return new String[0];
+            }
           }
 
           // Provide the reload function that uses the executor thread.
-          public ListenableFuture<String[]> reload(final UserGroupInformation k,
+          public ListenableFuture<String[]> reload(final String k,
                                                    String[] oldValue) throws Exception {
 
             return executor.submit(new Callable<String[]>() {
-              UserGroupInformation userGroupInformation = k;
+
               @Override
               public String[] call() throws Exception {
-                return userGroupInformation.getGroupNames();
+                return getGroupStrings(k);
               }
             });
           }

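The cache in UserProvider keeps its asynchronous-refresh behaviour: reload() hands the group lookup to the daemon executor, so readers keep getting the old value until the refresh completes. A stripped-down sketch of that Guava pattern (the executor setup mirrors the one already in UserProvider; lookupGroups and the timeout are illustrative stand-ins):

import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

public class AsyncReloadCacheSketch {
  private static final ListeningExecutorService EXECUTOR =
      MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(
          1,
          new ThreadFactoryBuilder().setDaemon(true).setNameFormat("group-cache-%d").build()));

  // Illustrative stand-in for the real group lookup against the Groups service.
  private static String[] lookupGroups(String shortName) {
    return new String[] { "examplegroup" };
  }

  private final LoadingCache<String, String[]> cache = CacheBuilder.newBuilder()
      .refreshAfterWrite(300, TimeUnit.SECONDS)  // illustrative; the patch derives this from conf
      .concurrencyLevel(20)
      .build(new CacheLoader<String, String[]>() {
        @Override
        public String[] load(String shortName) {
          // Synchronous load on a cache miss.
          return lookupGroups(shortName);
        }

        @Override
        public ListenableFuture<String[]> reload(final String shortName, String[] oldValue) {
          // Refresh off the caller's thread; get() keeps returning oldValue meanwhile.
          return EXECUTOR.submit(new Callable<String[]>() {
            @Override
            public String[] call() {
              return lookupGroups(shortName);
            }
          });
        }
      });
}
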
http://git-wip-us.apache.org/repos/asf/hbase/blob/190bdbf4/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
index 0aeac7a..3423289 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java
@@ -18,31 +18,95 @@
  */
 package org.apache.hadoop.hbase.security;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
 import java.io.IOException;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 
+import org.apache.commons.lang.SystemUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import com.google.common.collect.ImmutableSet;
 
+import static org.junit.Assert.*;
+
 @Category(SmallTests.class)
 public class TestUser {
   private static final Log LOG = LogFactory.getLog(TestUser.class);
 
   @Test
+  public void testCreateUserForTestingGroupCache() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    User uCreated = User.createUserForTesting(conf, "group_user", new String[] { "MYGROUP" });
+    UserProvider up = UserProvider.instantiate(conf);
+    User uProvided = up.create(UserGroupInformation.createRemoteUser("group_user"));
+    assertArrayEquals(uCreated.getGroupNames(), uProvided.getGroupNames());
+
+  }
+
+  @Test
+  public void testCacheGetGroups() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    UserProvider up = UserProvider.instantiate(conf);
+
+    // VERY unlikely that this user will exist on the box.
+    // This should mean the user has no groups.
+    String nonUser = "kklvfnvhdhcenfnniilggljhdecjhidkle";
+
+    // Create two UGI's for this username
+    UserGroupInformation ugiOne = UserGroupInformation.createRemoteUser(nonUser);
+    UserGroupInformation ugiTwo = UserGroupInformation.createRemoteUser(nonUser);
+
+    // Now try and get the user twice.
+    User uOne = up.create(ugiOne);
+    User uTwo = up.create(ugiTwo);
+
+    // Make sure that we didn't break groups and everything worked well.
+    assertArrayEquals(uOne.getGroupNames(),uTwo.getGroupNames());
+
+    // Check that they are referentially equal.
+    // Since getting the groups for a user that doesn't exist creates a new string array,
+    // the only way that they should be referentially equal is if the cache worked and
+    // made sure we didn't go to hadoop's script twice.
+    assertTrue(uOne.getGroupNames() == uTwo.getGroupNames());
+    assertEquals(0, ugiOne.getGroupNames().length);
+  }
+
+  @Test
+  public void testCacheGetGroupsRoot() throws Exception {
+    // Windows doesn't have a root user.
+    // However, pretty much every other *NIX OS will have root.
+    if (!SystemUtils.IS_OS_WINDOWS) {
+      Configuration conf = HBaseConfiguration.create();
+      UserProvider up = UserProvider.instantiate(conf);
+
+
+      String rootUserName = "root";
+
+      // Create two UGI's for this username
+      UserGroupInformation ugiOne = UserGroupInformation.createRemoteUser(rootUserName);
+      UserGroupInformation ugiTwo = UserGroupInformation.createRemoteUser(rootUserName);
+
+      // Now try and get the user twice.
+      User uOne = up.create(ugiOne);
+      User uTwo = up.create(ugiTwo);
+
+      // Make sure that we didn't break groups and everything worked well.
+      assertArrayEquals(uOne.getGroupNames(),uTwo.getGroupNames());
+      String[] groupNames = ugiOne.getGroupNames();
+      assertTrue(groupNames.length > 0);
+    }
+  }
+
+
+  @Test
   public void testBasicAttributes() throws Exception {
     Configuration conf = HBaseConfiguration.create();
     User user = User.createUserForTesting(conf, "simple", new String[]{"foo"});