Posted to common-commits@hadoop.apache.org by to...@apache.org on 2010/09/30 01:49:32 UTC

svn commit: r1002896 - in /hadoop/common/trunk: CHANGES.txt src/java/org/apache/hadoop/ipc/Server.java src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java src/test/core/org/apache/hadoop/ipc/TestRPC.java

Author: tomwhite
Date: Wed Sep 29 23:49:32 2010
New Revision: 1002896

URL: http://svn.apache.org/viewvc?rev=1002896&view=rev
Log:
HADOOP-6951.  Distinct minicluster services (e.g. NN and JT) overwrite each other's service policies.  Contributed by Aaron T. Myers.
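
For context (a sketch, not part of the commit message): before this patch ServiceAuthorizationManager kept its protocol-to-ACL map in static state, so when two services shared a JVM, as in a minicluster, whichever one called the static refresh() last replaced the other's policies. A minimal illustration against the pre-patch API; the method and provider parameters are placeholders:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.security.authorize.PolicyProvider;
  import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;

  // Pre-patch API: refresh() was static, so both calls below write into one
  // JVM-wide ACL map.
  public class OverwriteSketch {
    static void startMiniClusterServices(Configuration conf,
                                         PolicyProvider nnProvider,   // placeholder
                                         PolicyProvider jtProvider) { // placeholder
      ServiceAuthorizationManager.refresh(conf, nnProvider); // NN installs its ACLs
      ServiceAuthorizationManager.refresh(conf, jtProvider); // JT silently replaces them
    }
  }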

Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1002896&r1=1002895&r2=1002896&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Wed Sep 29 23:49:32 2010
@@ -250,6 +250,9 @@ Trunk (unreleased changes)
     HADOOP-6940. RawLocalFileSystem's markSupported method misnamed markSupport.
     (Tom White via eli).
 
+    HADOOP-6951.  Distinct minicluster services (e.g. NN and JT) overwrite each
+    other's service policies.  (Aaron T. Myers via tomwhite)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=1002896&r1=1002895&r2=1002896&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java Wed Sep 29 23:49:32 2010
@@ -60,6 +60,7 @@ import javax.security.sasl.SaslServer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
@@ -78,6 +79,7 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager;
@@ -182,6 +184,7 @@ public abstract class Server {
   
   private Configuration conf;
   private SecretManager<TokenIdentifier> secretManager;
+  private ServiceAuthorizationManager serviceAuthorizationManager = new ServiceAuthorizationManager();
 
   private int maxQueueSize;
   private final int maxRespSize;
@@ -239,6 +242,22 @@ public abstract class Server {
     return rpcMetrics;
   }
 
+  /**
+   * Refresh the service authorization ACL for the service handled by this server.
+   */
+  public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
+    serviceAuthorizationManager.refresh(conf, provider);
+  }
+
+  /**
+   * Returns a handle to the serviceAuthorizationManager (required in tests)
+   * @return instance of ServiceAuthorizationManager for this server
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+  public ServiceAuthorizationManager getServiceAuthorizationManager() {
+    return serviceAuthorizationManager;
+  }
+
   /** A call queued for handling. */
   private static class Call {
     private int id;                               // the client's call id
@@ -1652,7 +1671,7 @@ public abstract class Server {
         throw new AuthorizationException("Unknown protocol: " + 
                                          connection.getProtocol());
       }
-      ServiceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
+      serviceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
     }
   }
   

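A hedged usage sketch of the refreshServiceAcl entry point added to Server above (getServiceAuthorizationManager remains a test-only accessor). The helper method, ACL keys, and protocol classes below are illustrative assumptions, not part of this commit:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.ipc.Server;
  import org.apache.hadoop.security.authorize.PolicyProvider;
  import org.apache.hadoop.security.authorize.Service;

  public class PerServerAclSketch {
    // Builds a one-entry PolicyProvider mapping an ACL key to a protocol.
    static PolicyProvider singleService(final String aclKey, final Class<?> protocol) {
      return new PolicyProvider() {
        @Override
        public Service[] getServices() {
          return new Service[] { new Service(aclKey, protocol) };
        }
      };
    }

    // Each server refreshes only its own ServiceAuthorizationManager, so the
    // two policies no longer clobber each other.
    static void refreshBoth(Configuration conf,
                            Server nnServer, Class<?> nnProtocol,
                            Server jtServer, Class<?> jtProtocol) {
      nnServer.refreshServiceAcl(conf,
          singleService("security.namenode.protocol.acl", nnProtocol));
      jtServer.refreshServiceAcl(conf,
          singleService("security.jobtracker.protocol.acl", jtProtocol));
    }
  }
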
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1002896&r1=1002895&r2=1002896&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Wed Sep 29 23:49:32 2010
@@ -20,6 +20,7 @@ package org.apache.hadoop.security.autho
 import java.io.IOException;
 import java.util.IdentityHashMap;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -43,7 +44,7 @@ public class ServiceAuthorizationManager
   private static final Log LOG = LogFactory
   .getLog(ServiceAuthorizationManager.class);
 
-  private static Map<Class<?>, AccessControlList> protocolToAcl =
+  private Map<Class<?>, AccessControlList> protocolToAcl =
     new IdentityHashMap<Class<?>, AccessControlList>();
   
   /**
@@ -73,7 +74,7 @@ public class ServiceAuthorizationManager
    * @param hostname fully qualified domain name of the client
    * @throws AuthorizationException on authorization failure
    */
-  public static void authorize(UserGroupInformation user, 
+  public void authorize(UserGroupInformation user, 
                                Class<?> protocol,
                                Configuration conf,
                                String hostname
@@ -129,7 +130,7 @@ public class ServiceAuthorizationManager
     AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
   }
 
-  public static synchronized void refresh(Configuration conf,
+  public synchronized void refresh(Configuration conf,
                                           PolicyProvider provider) {
     // Get the system property 'hadoop.policy.file'
     String policyFile = 
@@ -158,4 +159,9 @@ public class ServiceAuthorizationManager
     // Flip to the newly parsed permissions
     protocolToAcl = newAcls;
   }
+
+  // Package-protected for use in tests.
+  Set<Class<?>> getProtocolsWithAcls() {
+    return protocolToAcl.keySet();
+  }
 }
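
With refresh() and authorize() now instance methods, two ServiceAuthorizationManager objects in the same JVM keep separate protocolToAcl maps. A minimal sketch under that assumption; the protocol interfaces and ACL keys are invented for illustration:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.security.authorize.PolicyProvider;
  import org.apache.hadoop.security.authorize.Service;
  import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;

  public class IndependentAclSketch {
    interface AlphaProtocol { }  // stands in for a NameNode-side protocol
    interface BetaProtocol { }   // stands in for a JobTracker-side protocol

    static PolicyProvider provider(final String key, final Class<?> protocol) {
      return new PolicyProvider() {
        @Override
        public Service[] getServices() {
          return new Service[] { new Service(key, protocol) };
        }
      };
    }

    public static void main(String[] args) {
      Configuration conf = new Configuration();
      ServiceAuthorizationManager alphaAuth = new ServiceAuthorizationManager();
      ServiceAuthorizationManager betaAuth = new ServiceAuthorizationManager();

      // Each refresh replaces only that instance's map; before this change the
      // second call would have discarded the ACLs installed by the first.
      alphaAuth.refresh(conf, provider("security.alpha.protocol.acl", AlphaProtocol.class));
      betaAuth.refresh(conf, provider("security.beta.protocol.acl", BetaProtocol.class));
    }
  }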

Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java?rev=1002896&r1=1002895&r2=1002896&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java Wed Sep 29 23:49:32 2010
@@ -41,7 +41,6 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.Service;
-import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.AccessControlException;
 
 import static org.mockito.Mockito.*;
@@ -364,11 +363,11 @@ public class TestRPC extends TestCase {
   }
   
   private void doRPCs(Configuration conf, boolean expectFailure) throws Exception {
-    ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider());
-    
     Server server = RPC.getServer(TestProtocol.class,
                                   new TestImpl(), ADDRESS, 0, 5, true, conf, null);
 
+    server.refreshServiceAcl(conf, new TestPolicyProvider());
+
     TestProtocol proxy = null;
 
     server.start();