You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 04:44:57 UTC
svn commit: r1077137 [1/5] - in
/hadoop/common/branches/branch-0.20-security-patches: ./
.eclipse.templates/ ivy/
src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/
src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/
src/contrib/stream...
Author: omalley
Date: Fri Mar 4 03:44:54 2011
New Revision: 1077137
URL: http://svn.apache.org/viewvc?rev=1077137&view=rev
Log:
commit ba9b91a42b4e051e1741a7b181e53679857389b8
Author: Jakob Homan <jh...@yahoo-inc.com>
Date: Sun Jan 31 20:00:10 2010 -0800
HADOOP-6299 from
https://issues.apache.org/jira/secure/attachment/12434362/HADOOP-6299-Y20.patch
+++ b/YAHOO-CHANGES.txt
+ HADOOP-6299. Reimplement the UserGroupInformation to use the OS
+ specific and Kerberos JAAS login. (jhoman, ddas, oom)
+
Added:
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/AccessControlList.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/TestUserGroupInformation.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/authorize/TestAccessControlList.java
Removed:
hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUgiManager.java
hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUgiManager.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/Group.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/PermissionChecker.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/SecurityUtil.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UnixUserGroupInformation.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/ConfiguredPolicy.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/ConnectionPermission.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/TestAccessControlList.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/TestUnixUserGroupInformation.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java
Modified:
hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath
hadoop/common/branches/branch-0.20-security-patches/ivy.xml
hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties
hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
hadoop/common/branches/branch-0.20-security-patches/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/FileSystem.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/ConnectionHeader.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/RPC.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/Server.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/GroupMappingServiceProvider.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/Groups.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/User.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UserGroupInformation.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/Service.java
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSUtil.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/AccessTokenHandler.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/balancer/Balancer.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/PermissionChecker.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/tools/DFSAdmin.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/tools/DFSck.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/IsolationRunner.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobClient.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistory.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobInProgress.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobQueueClient.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobTracker.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/QueueManager.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/Task.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskRunner.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/tools/MRAdmin.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/JobSubmissionFiles.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/split/SplitMetaInfoReader.java
hadoop/common/branches/branch-0.20-security-patches/src/test/hdfs-site.xml
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/cli/testConf.xml
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/filecache/TestTrackerDistributedCacheManager.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/fs/TestCopyFiles.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/fs/TestFileSystem.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/AppendTestUtil.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/DFSTestUtil.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSPermission.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSShell.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileAppend2.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileCreation.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestGetBlocks.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestLeaseRecovery2.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestQuota.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/ipc/TestRPC.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/ClusterWithLinuxTaskController.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/MiniMRCluster.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestIsolationRunner.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobHistory.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobQueueInformation.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobTrackerRestart.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestLocalizationWithLinuxTaskController.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestLostTracker.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestMapredSystemDir.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFSWithDistinctUsers.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestNodeRefresh.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestQueueAclsForCurrentUser.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestQueueManager.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestRecoveryManager.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestSubmitJob.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/TestGroupMappingServiceRefresh.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/TestMapredGroupMappingServiceRefresh.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/TestPermission.java
hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java
hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/DistCh.java
hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/DistCp.java
hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/HadoopArchives.java
Modified: hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath (original)
+++ hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath Fri Mar 4 03:44:54 2011
@@ -40,7 +40,8 @@
<classpathentry kind="lib" path="src/test/lib/ftplet-api-1.0.0-SNAPSHOT.jar"/>
<classpathentry kind="lib" path="src/test/lib/ftpserver-core-1.0.0-SNAPSHOT.jar"/>
<classpathentry kind="lib" path="src/test/lib/ftpserver-server-1.0.0-SNAPSHOT.jar"/>
- <classpathentry kind="lib" path="src/test/lib/mina-core-2.0.0-M2-20080407.124109-12.jar"/>
+ <classpathentry kind="lib" path="src/test/lib/mina-core-2.0.0-M2-20080407.124109-12.jar"/>
+ <classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/mockito-all-1.8.0.jar"/>
<classpathentry kind="lib" path="build/test/classes"/>
<classpathentry kind="output" path="build/eclipse-classes"/>
</classpath>
Modified: hadoop/common/branches/branch-0.20-security-patches/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/ivy.xml?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/ivy.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/ivy.xml Fri Mar 4 03:44:54 2011
@@ -270,6 +270,11 @@
rev="${aspectj.version}"
conf="common->default">
</dependency>
+ <dependency org="org.mockito"
+ name="mockito-all"
+ rev="${mockito-all.version}"
+ conf="common->default">
+ </dependency>
</dependencies>
</ivy-module>
Modified: hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties (original)
+++ hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties Fri Mar 4 03:44:54 2011
@@ -59,6 +59,8 @@ kfs.version=0.1
log4j.version=1.2.15
lucene-core.version=2.3.1
+mockito-all.version=1.8.0
+
oro.version=2.0.8
rats-lib.version=0.5.1
Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java Fri Mar 4 03:44:54 2011
@@ -26,7 +26,7 @@ import javax.servlet.http.HttpServletReq
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.server.namenode.FileDataServlet;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
/** {@inheritDoc} */
public class ProxyFileDataServlet extends FileDataServlet {
@@ -35,17 +35,17 @@ public class ProxyFileDataServlet extend
/** {@inheritDoc} */
@Override
- protected URI createUri(FileStatus i, UnixUserGroupInformation ugi,
+ protected URI createUri(FileStatus i, UserGroupInformation ugi,
ClientProtocol nnproxy, HttpServletRequest request) throws IOException,
URISyntaxException {
return new URI(request.getScheme(), null, request.getServerName(), request
.getServerPort(), "/streamFile", "filename=" + i.getPath() + "&ugi="
- + ugi, null);
+ + ugi.getUserName(), null);
}
/** {@inheritDoc} */
@Override
- protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
- return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+ protected UserGroupInformation getUGI(HttpServletRequest request) {
+ return (UserGroupInformation) request.getAttribute("authorized.ugi");
}
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java Fri Mar 4 03:44:54 2011
@@ -42,7 +42,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
public class ProxyFilter implements Filter {
public static final Log LOG = LogFactory.getLog(ProxyFilter.class);
@@ -50,9 +50,6 @@ public class ProxyFilter implements Filt
/** Pattern for triggering reload of user permissions */
protected static final Pattern RELOAD_PATTERN = Pattern
.compile("^(/reloadPermFiles)$");
- /** Pattern for triggering clearing of ugi Cache */
- protected static final Pattern CLEAR_PATTERN = Pattern
- .compile("^(/clearUgiCache)$");
/** Pattern for a filter to find out if a request is HFTP/HSFTP request */
protected static final Pattern HFTP_PATTERN = Pattern
.compile("^(/listPaths|/data|/streamFile)$");
@@ -252,12 +249,6 @@ public class ProxyFilter implements Filt
LOG.info("User permissions and user certs files reloaded");
rsp.setStatus(HttpServletResponse.SC_OK);
return;
- } else if (CLEAR_PATTERN.matcher(servletPath).matches()
- && checkUser("Admin", certs[0])) {
- ProxyUgiManager.clearCache();
- LOG.info("Ugi cache cleared");
- rsp.setStatus(HttpServletResponse.SC_OK);
- return;
}
if (!isAuthorized) {
@@ -265,19 +256,11 @@ public class ProxyFilter implements Filt
return;
}
// request is authorized, set ugi for servlets
- UnixUserGroupInformation ugi = ProxyUgiManager
- .getUgiForUser(userID);
- if (ugi == null) {
- LOG.info("Can't retrieve ugi for user " + userID);
- rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
- "Can't retrieve ugi for user " + userID);
- return;
- }
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userID);
rqst.setAttribute("authorized.ugi", ugi);
} else { // http request, set ugi for servlets, only for testing purposes
String ugi = rqst.getParameter("ugi");
- rqst.setAttribute("authorized.ugi", new UnixUserGroupInformation(ugi
- .split(",")));
+ rqst.setAttribute("authorized.ugi", UserGroupInformation.createRemoteUser(ugi));
}
chain.doFilter(request, response);
Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Fri Mar 4 03:44:54 2011
@@ -20,7 +20,7 @@ package org.apache.hadoop.hdfsproxy;
import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
/** {@inheritDoc} */
public class ProxyListPathsServlet extends ListPathsServlet {
@@ -29,7 +29,7 @@ public class ProxyListPathsServlet exten
/** {@inheritDoc} */
@Override
- protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
- return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+ protected UserGroupInformation getUGI(HttpServletRequest request) {
+ return (UserGroupInformation) request.getAttribute("authorized.ugi");
}
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Fri Mar 4 03:44:54 2011
@@ -19,13 +19,14 @@ package org.apache.hadoop.hdfsproxy;
import java.io.IOException;
import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.server.namenode.StreamFile;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.conf.Configuration;
/** {@inheritDoc} */
@@ -36,20 +37,26 @@ public class ProxyStreamFile extends Str
/** {@inheritDoc} */
@Override
protected DFSClient getDFSClient(HttpServletRequest request)
- throws IOException {
+ throws IOException, InterruptedException {
ServletContext context = getServletContext();
- Configuration conf = new Configuration((Configuration) context
+ final Configuration conf = new Configuration((Configuration) context
.getAttribute("name.conf"));
- UnixUserGroupInformation.saveToConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME, getUGI(request));
- InetSocketAddress nameNodeAddr = (InetSocketAddress) context
+ final InetSocketAddress nameNodeAddr = (InetSocketAddress) context
.getAttribute("name.node.address");
- return new DFSClient(nameNodeAddr, conf);
+ DFSClient client =
+ getUGI(request).doAs(new PrivilegedExceptionAction<DFSClient>() {
+ @Override
+ public DFSClient run() throws IOException {
+ return new DFSClient(nameNodeAddr, conf);
+ }
+ });
+
+ return client;
}
/** {@inheritDoc} */
@Override
- protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
- return (UnixUserGroupInformation) request.getAttribute("authorized.ugi");
+ protected UserGroupInformation getUGI(HttpServletRequest request) {
+ return (UserGroupInformation) request.getAttribute("authorized.ugi");
}
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java Fri Mar 4 03:44:54 2011
@@ -20,6 +20,7 @@ package org.apache.hadoop.streaming;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -47,25 +48,33 @@ public class TestStreamingAsDifferentUse
return;
}
startCluster();
- JobConf myConf = getClusterConf();
- FileSystem inFs = inputPath.getFileSystem(myConf);
- FileSystem outFs = outputPath.getFileSystem(myConf);
- outFs.delete(outputPath, true);
- if (!inFs.mkdirs(inputPath)) {
- throw new IOException("Mkdirs failed to create " + inFs.toString());
- }
- DataOutputStream file = inFs.create(new Path(inputPath, "part-0"));
- file.writeBytes(input);
- file.close();
- String[] args =
- new String[] { "-input", inputPath.makeQualified(inFs).toString(),
+ final JobConf myConf = getClusterConf();
+ myConf.set("hadoop.job.history.user.location","none");
+ taskControllerUser.doAs(new PrivilegedExceptionAction<Void>() {
+ public Void run() throws IOException{
+
+ FileSystem inFs = inputPath.getFileSystem(myConf);
+ FileSystem outFs = outputPath.getFileSystem(myConf);
+ outFs.delete(outputPath, true);
+ if (!inFs.mkdirs(inputPath)) {
+ throw new IOException("Mkdirs failed to create " + inFs.toString());
+ }
+ DataOutputStream file = inFs.create(new Path(inputPath, "part-0"));
+ file.writeBytes(input);
+ file.close();
+ final String[] args =
+ new String[] { "-input", inputPath.makeQualified(inFs).toString(),
"-output", outputPath.makeQualified(outFs).toString(), "-mapper",
map, "-reducer", reduce, "-jobconf",
"keep.failed.task.files=true", "-jobconf",
"stream.tmpdir=" + System.getProperty("test.build.data", "/tmp") };
- StreamJob streamJob = new StreamJob(args, true);
- streamJob.setConf(myConf);
- assertTrue("Job has not succeeded", streamJob.go() == 0);
- assertOwnerShip(outputPath);
+
+ StreamJob streamJob = new StreamJob(args, true);
+ streamJob.setConf(myConf);
+ assertTrue("Job has not succeeded", streamJob.go() == 0);
+ assertOwnerShip(outputPath);
+ return null;
+ }
+ });
}
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/FileSystem.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/fs/FileSystem.java Fri Mar 4 03:44:54 2011
@@ -34,8 +34,6 @@ import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
-import javax.security.auth.login.LoginException;
-
import org.apache.commons.logging.*;
import org.apache.hadoop.conf.*;
@@ -979,9 +977,6 @@ public abstract class FileSystem extends
/** Default pattern character: Character set close. */
private static final char PAT_SET_CLOSE = ']';
- GlobFilter() {
- }
-
GlobFilter(String filePattern) throws IOException {
setRegex(filePattern);
}
@@ -1443,15 +1438,7 @@ public abstract class FileSystem extends
Key(URI uri, Configuration conf) throws IOException {
scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
- UserGroupInformation ugi = UserGroupInformation.readFrom(conf);
- if (ugi == null) {
- try {
- ugi = UserGroupInformation.login(conf);
- } catch(LoginException e) {
- LOG.warn("uri=" + uri, e);
- }
- }
- username = ugi == null? null: ugi.getUserName();
+ username = UserGroupInformation.getCurrentUser().getUserName();
}
/** {@inheritDoc} */
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/ConnectionHeader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/ConnectionHeader.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/ConnectionHeader.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/ConnectionHeader.java Fri Mar 4 03:44:54 2011
@@ -25,7 +25,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
/**
@@ -36,7 +35,7 @@ class ConnectionHeader implements Writab
public static final Log LOG = LogFactory.getLog(ConnectionHeader.class);
private String protocol;
- private UserGroupInformation ugi = new UnixUserGroupInformation();
+ private UserGroupInformation ugi = null;
public ConnectionHeader() {}
@@ -60,9 +59,10 @@ class ConnectionHeader implements Writab
protocol = null;
}
- boolean ugiPresent = in.readBoolean();
- if (ugiPresent) {
- ugi.readFields(in);
+ boolean ugiUsernamePresent = in.readBoolean();
+ if (ugiUsernamePresent) {
+ String username = in.readUTF();
+ ugi = UserGroupInformation.createRemoteUser(username);
} else {
ugi = null;
}
@@ -73,7 +73,7 @@ class ConnectionHeader implements Writab
Text.writeString(out, (protocol == null) ? "" : protocol);
if (ugi != null) {
out.writeBoolean(true);
- ugi.write(out);
+ out.writeUTF(ugi.getUserName());
} else {
out.writeBoolean(false);
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/RPC.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/RPC.java Fri Mar 4 03:44:54 2011
@@ -32,19 +32,16 @@ import java.util.Map;
import java.util.HashMap;
import javax.net.SocketFactory;
-import javax.security.auth.Subject;
-import javax.security.auth.login.LoginException;
import org.apache.commons.logging.*;
import org.apache.hadoop.io.*;
-import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
+import org.apache.hadoop.net.NetUtils;
+
/** A simple RPC mechanism.
*
* A <i>protocol</i> is a Java interface. All parameters and return types must
@@ -343,12 +340,7 @@ public class RPC {
Class<? extends VersionedProtocol> protocol,
long clientVersion, InetSocketAddress addr, Configuration conf,
SocketFactory factory) throws IOException {
- UserGroupInformation ugi = null;
- try {
- ugi = UserGroupInformation.login(conf);
- } catch (LoginException le) {
- throw new RuntimeException("Couldn't login!");
- }
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
return getProxy(protocol, clientVersion, addr, ugi, conf, factory);
}
@@ -388,8 +380,8 @@ public class RPC {
long clientVersion, InetSocketAddress addr, Configuration conf)
throws IOException {
- return getProxy(protocol, clientVersion, addr, conf, NetUtils
- .getDefaultSocketFactory(conf));
+ return getProxy(protocol, clientVersion, addr, conf,
+ NetUtils.getDefaultSocketFactory(conf));
}
/**
@@ -462,7 +454,6 @@ public class RPC {
public static class Server extends org.apache.hadoop.ipc.Server {
private Object instance;
private boolean verbose;
- private boolean authorize = false;
/** Construct an RPC server.
* @param instance the instance whose methods will be called
@@ -496,9 +487,6 @@ public class RPC {
super(bindAddress, port, Invocation.class, numHandlers, conf, classNameBase(instance.getClass().getName()));
this.instance = instance;
this.verbose = verbose;
- this.authorize =
- conf.getBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG,
- false);
}
public Writable call(Class<?> protocol, Writable param, long receivedTime)
@@ -561,21 +549,6 @@ public class RPC {
throw ioe;
}
}
-
- @Override
- public void authorize(Subject user, ConnectionHeader connection)
- throws AuthorizationException {
- if (authorize) {
- Class<?> protocol = null;
- try {
- protocol = getProtocolClass(connection.getProtocol(), getConf());
- } catch (ClassNotFoundException cfne) {
- throw new AuthorizationException("Unknown protocol: " +
- connection.getProtocol());
- }
- ServiceAuthorizationManager.authorize(user, protocol);
- }
- }
}
private static void log(String value) {
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/Server.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/ipc/Server.java Fri Mar 4 03:44:54 2011
@@ -18,12 +18,18 @@
package org.apache.hadoop.ipc;
-import java.io.IOException;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
-
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.net.BindException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.SocketException;
+import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.ClosedChannelException;
@@ -33,41 +39,30 @@ import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.nio.channels.WritableByteChannel;
-
-import java.net.BindException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.SocketException;
-import java.net.UnknownHostException;
-
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
-import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
-import javax.security.auth.Subject;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.ipc.metrics.RpcMetrics;
-import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
/** An abstract IPC service. IPC calls take a single {@link Writable} as a
* parameter, and return a {@link Writable} as their value. A service runs on
@@ -76,6 +71,7 @@ import org.apache.hadoop.security.UserGr
* @see Client
*/
public abstract class Server {
+ private final boolean authorize;
/**
* The first four bytes of Hadoop RPC connections
@@ -719,7 +715,7 @@ public abstract class Server {
ConnectionHeader header = new ConnectionHeader();
Class<?> protocol;
- Subject user = null;
+ UserGroupInformation user = null;
// Fake 'call' for failed authorization response
private final int AUTHROIZATION_FAILED_CALLID = -1;
@@ -890,14 +886,7 @@ public abstract class Server {
throw new IOException("Unknown protocol: " + header.getProtocol());
}
- // TODO: Get the user name from the GSS API for Kerberbos-based security
- // Create the user subject; however use the groups as defined on the
- // server-side, don't trust the user groups provided by the client
- UserGroupInformation ugi = header.getUgi();
- user = null;
- if(ugi != null) {
- user = SecurityUtil.getSubject(conf, header.getUgi().getUserName());
- }
+ user = header.getUgi();
}
private void processData() throws IOException, InterruptedException {
@@ -956,24 +945,23 @@ public abstract class Server {
try {
// Make the call as the user via Subject.doAs, thus associating
// the call with the Subject
- value =
- Subject.doAs(call.connection.user,
- new PrivilegedExceptionAction<Writable>() {
- @Override
- public Writable run() throws Exception {
- // make the call
- return call(call.connection.protocol,
- call.param, call.timestamp);
-
- }
- }
- );
-
- } catch (PrivilegedActionException pae) {
- Exception e = pae.getException();
- LOG.info(getName()+", call "+call+": error: " + e, e);
- errorClass = e.getClass().getName();
- error = StringUtils.stringifyException(e);
+ if (call.connection.user == null) {
+ value = call(call.connection.protocol, call.param,
+ call.timestamp);
+ } else {
+ value =
+ call.connection.user.doAs
+ (new PrivilegedExceptionAction<Writable>() {
+ @Override
+ public Writable run() throws Exception {
+ // make the call
+ return call(call.connection.protocol,
+ call.param, call.timestamp);
+
+ }
+ }
+ );
+ }
} catch (Throwable e) {
LOG.info(getName()+", call "+call+": error: " + e, e);
errorClass = e.getClass().getName();
@@ -1027,6 +1015,9 @@ public abstract class Server {
this.maxIdleTime = 2*conf.getInt("ipc.client.connection.maxidletime", 1000);
this.maxConnectionsToNuke = conf.getInt("ipc.client.kill.max", 10);
this.thresholdIdleConnections = conf.getInt("ipc.client.idlethreshold", 4000);
+ this.authorize =
+ conf.getBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG,
+ false);
// Start the listener here and let it bind to the port
listener = new Listener();
@@ -1158,8 +1149,20 @@ public abstract class Server {
* @param connection incoming connection
* @throws AuthorizationException when the client isn't authorized to talk the protocol
*/
- public void authorize(Subject user, ConnectionHeader connection)
- throws AuthorizationException {}
+ public void authorize(UserGroupInformation user,
+ ConnectionHeader connection
+ ) throws AuthorizationException {
+ if (authorize) {
+ Class<?> protocol = null;
+ try {
+ protocol = getProtocolClass(connection.getProtocol(), getConf());
+ } catch (ClassNotFoundException cfne) {
+ throw new AuthorizationException("Unknown protocol: " +
+ connection.getProtocol());
+ }
+ ServiceAuthorizationManager.authorize(user, protocol);
+ }
+ }
/**
* The number of open RPC conections
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/GroupMappingServiceProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/GroupMappingServiceProvider.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/GroupMappingServiceProvider.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/GroupMappingServiceProvider.java Fri Mar 4 03:44:54 2011
@@ -27,10 +27,10 @@ import java.util.List;
interface GroupMappingServiceProvider {
/**
- * Get all various {@link Group} memberships of a given {@link User}.
+ * Get all various group memberships of a given user.
* Returns EMPTY list in case of non-existing user
- * @param user <code>User</code> name
- * @return <code>Group</code> memberships of <code>user</code>
+ * @param user User's name
+ * @return group memberships of user
* @throws IOException
*/
public List<String> getGroups(String user) throws IOException;
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/Groups.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/Groups.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/Groups.java Fri Mar 4 03:44:54 2011
@@ -31,14 +31,13 @@ import org.apache.commons.logging.LogFac
/**
* A user-to-groups mapping service.
*
- * {@link Groups} allows for server to get the various {@link Group} memberships
- * of a given {@link User} via the {@link #getGroups(String)} call, thus ensuring
- * a consistent user-to-groups mapping and protects against vagaries of different
- * mappings on servers and clients in a Hadoop cluster.
+ * {@link Groups} allows for server to get the various group memberships
+ * of a given user via the {@link #getGroups(String)} call, thus ensuring
+ * a consistent user-to-groups mapping and protects against vagaries of
+ * different mappings on servers and clients in a Hadoop cluster.
*/
public class Groups {
private static final Log LOG = LogFactory.getLog(Groups.class);
-
private final GroupMappingServiceProvider impl;
private final Map<String, CachedGroups> userToGroupsMap =
@@ -61,9 +60,9 @@ public class Groups {
}
/**
- * Get the {@link Group} memberships of a given {@link User}.
- * @param user <code>User</code> name
- * @return the <code>Group</code> memberships of <code>user</code>
+ * Get the group memberships of a given user.
+ * @param user User's name
+ * @return the group memberships of the user
* @throws IOException
*/
public List<String> getGroups(String user) throws IOException {
@@ -75,7 +74,6 @@ public class Groups {
LOG.info("Returning cached groups for '" + user + "'");
return groups.getGroups();
}
-
// Create and cache user's groups
groups = new CachedGroups(impl.getGroups(user));
userToGroupsMap.put(user, groups);
@@ -108,4 +106,18 @@ public class Groups {
return groups;
}
}
+
+ private static Groups GROUPS = null;
+
+ /**
+ * Get the groups being used to map user-to-groups.
+ * @return the groups being used to map user-to-groups.
+ */
+ public static Groups getUserToGroupsMappingService(Configuration conf) {
+ if(GROUPS == null) {
+ LOG.debug(" Creating new Groups object");
+ GROUPS = new Groups(conf);
+ }
+ return GROUPS;
+ }
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java Fri Mar 4 03:44:54 2011
@@ -34,7 +34,7 @@ public interface RefreshUserToGroupMappi
public static final long versionID = 1L;
/**
- * Refresh {@link User} to {@link Group} mappings.
+ * Refresh user to group mappings.
* @param conf
* @throws IOException
*/
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java Fri Mar 4 03:44:54 2011
@@ -30,9 +30,9 @@ import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ExitCodeException;
/**
- * A simple shell-based implementation of {@link GroupMappingServiceProvider} which
- * exec's the <code>groups</code> shell command to fetch the {@link Group}
- * memberships of a given {@link User}.
+ * A simple shell-based implementation of {@link GroupMappingServiceProvider}
+ * that exec's the <code>groups</code> shell command to fetch the group
+ * memberships of a given user.
*/
public class ShellBasedUnixGroupsMapping implements GroupMappingServiceProvider {
Map<String, List<String>> userGroups =
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/User.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/User.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/User.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/User.java Fri Mar 4 03:44:54 2011
@@ -20,51 +20,62 @@ package org.apache.hadoop.security;
import java.security.Principal;
/**
- * The username of a user.
+ * Save the full and short name of the user as a principal. This allows us to
+ * have a single type that we always look for when picking up user names.
*/
-public class User implements Principal {
- final String user;
+class User implements Principal {
+ private final String fullName;
+ private final String shortName;
+
+ public User(String name) {
+ fullName = name;
+ int atIdx = name.indexOf('@');
+ if (atIdx == -1) {
+ shortName = name;
+ } else {
+ int slashIdx = name.indexOf('/');
+ if (slashIdx == -1 || atIdx < slashIdx) {
+ shortName = name.substring(0, atIdx);
+ } else {
+ shortName = name.substring(0, slashIdx);
+ }
+ }
+ }
/**
- * Create a new <code>User</code> with the given username.
- * @param user user name
+ * Get the full name of the user.
*/
- public User(String user) {
- this.user = user;
- }
-
@Override
public String getName() {
- return user;
+ return fullName;
}
-
+
+ /**
+ * Get the user name up to the first '/' or '@'
+ * @return the leading part of the user name
+ */
+ public String getShortName() {
+ return shortName;
+ }
+
@Override
- public String toString() {
- return user;
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ } else if (o == null || getClass() != o.getClass()) {
+ return false;
+ } else {
+ return fullName.equals(((User) o).fullName);
+ }
}
-
+
@Override
public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((user == null) ? 0 : user.hashCode());
- return result;
+ return fullName.hashCode();
}
-
+
@Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- User other = (User) obj;
- if (user == null) {
- if (other.user != null)
- return false;
- } else if (!user.equals(other.user))
- return false;
- return true;
+ public String toString() {
+ return fullName;
}
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UserGroupInformation.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/UserGroupInformation.java Fri Mar 4 03:44:54 2011
@@ -18,112 +18,562 @@
package org.apache.hadoop.security;
import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.Principal;
+import java.security.PrivilegedAction;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
import java.util.Set;
import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
+import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
+import javax.security.auth.spi.LoginModule;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
-/** A {@link Writable} abstract class for storing user and groups information.
+import com.sun.security.auth.NTUserPrincipal;
+import com.sun.security.auth.UnixPrincipal;
+import com.sun.security.auth.module.Krb5LoginModule;
+
+/**
+ * User and group information for Hadoop.
+ * This class wraps around a JAAS Subject and provides methods to determine the
+ * user's username and groups. It supports the Windows, Unix and Kerberos
+ * login modules.
*/
-public abstract class UserGroupInformation implements Writable, Principal {
- public static final Log LOG = LogFactory.getLog(UserGroupInformation.class);
- private static UserGroupInformation LOGIN_UGI = null;
-
- private static final ThreadLocal<Subject> currentUser =
- new ThreadLocal<Subject>();
-
- /** @return the {@link UserGroupInformation} for the current thread */
- public static UserGroupInformation getCurrentUGI() {
- Subject user = getCurrentUser();
-
- if (user == null) {
- user = currentUser.get();
+public class UserGroupInformation {
+ private static final Log LOG = LogFactory.getLog(UserGroupInformation.class);
+ private static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
+
+ /**
+ * A login module that looks at the Kerberos, Unix, or Windows principal and
+ * adds the corresponding UserName.
+ */
+ public static class HadoopLoginModule implements LoginModule {
+ private Subject subject;
+
+ @Override
+ public boolean abort() throws LoginException {
+ return true;
+ }
+
+ private <T extends Principal> T getCanonicalUser(Class<T> cls) {
+ for(T user: subject.getPrincipals(cls)) {
+ return user;
+ }
+ return null;
+ }
+
+ @Override
+ public boolean commit() throws LoginException {
+ Principal user = null;
+ // if we are using kerberos, try it out
+ if (useKerberos) {
+ user = getCanonicalUser(KerberosPrincipal.class);
+ }
+ // if we don't have a kerberos user, use the OS user
if (user == null) {
- return null;
+ user = getCanonicalUser(OS_PRINCIPAL_CLASS);
+ }
+ // if we found the user, add our principal
+ if (user != null) {
+ subject.getPrincipals().add(new User(user.getName()));
+ return true;
}
+ LOG.error("Can't find user in " + subject);
+ throw new LoginException("Can't find user name");
+ }
+
+ @Override
+ public void initialize(Subject subject, CallbackHandler callbackHandler,
+ Map<String, ?> sharedState, Map<String, ?> options) {
+ this.subject = subject;
+ }
+
+ @Override
+ public boolean login() throws LoginException {
+ return true;
+ }
+
+ @Override
+ public boolean logout() throws LoginException {
+ return true;
+ }
+ }
+
+ /** Are the static variables that depend on configuration initialized? */
+ private static boolean isInitialized = false;
+ /** Should we use Kerberos configuration? */
+ private static boolean useKerberos;
+ /** Server-side groups fetching service */
+ private static Groups groups;
+
+ /**
+ * A method to initialize the fields that depend on a configuration.
+ * Must be called before useKerberos or groups is used.
+ */
+ private static synchronized void ensureInitialized() {
+ if (!isInitialized) {
+ initialize(new Configuration());
+ }
+ }
+
+ /**
+ * Set the configuration values for UGI.
+ * @param conf the configuration to use
+ */
+ private static synchronized void initialize(Configuration conf) {
+ String value = conf.get(HADOOP_SECURITY_AUTHENTICATION);
+ if ("simple".equals(value)) {
+ useKerberos = false;
+ } else if (value == null || "kerberos".equals(value)) {
+ useKerberos = true;
+ } else {
+ throw new IllegalArgumentException("Invalid attribute value for " +
+ HADOOP_SECURITY_AUTHENTICATION +
+ " of " + value);
+ }
+ // If we haven't set up testing groups, use the configuration to find it
+ if (!(groups instanceof TestingGroups)) {
+ groups = Groups.getUserToGroupsMappingService(conf);
+ }
+ // Set the configuration for JAAS to be the Hadoop configuration.
+ // This is done here rather than a static initializer to avoid a
+ // circular dependence.
+ javax.security.auth.login.Configuration.setConfiguration
+ (new HadoopConfiguration());
+ isInitialized = true;
+ }
+
+ /**
+ * Set the static configuration for UGI.
+ * In particular, set the security authentication mechanism and the
+ * group look up service.
+ * @param conf the configuration to use
+ */
+ public static void setConfiguration(Configuration conf) {
+ initialize(conf);
+ }
+
+ /**
+ * Determine if UserGroupInformation is using Kerberos to determine
+ * user identities or is relying on simple authentication
+ *
+ * @return true if UGI is working in a secure environment
+ */
+ public static boolean isSecurityEnabled() {
+ ensureInitialized();
+ return useKerberos;
+ }
+
+ /**
+ * Information about the logged in user.
+ */
+ private static UserGroupInformation loginUser = null;
+ private static String keytabPrincipal = null;
+ private static String keytabFile = null;
+
+ private final Subject subject;
+ private final Set<Token<? extends TokenIdentifier>> tokens =
+ new LinkedHashSet<Token<? extends TokenIdentifier>>();
+
+ private static final String OS_LOGIN_MODULE_NAME;
+ private static final Class<? extends Principal> OS_PRINCIPAL_CLASS;
+ private static final boolean windows =
+ System.getProperty("os.name").startsWith("Windows");
+ static {
+ if (windows) {
+ OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.NTLoginModule";
+ OS_PRINCIPAL_CLASS = NTUserPrincipal.class;
+ } else {
+ OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.UnixLoginModule";
+ OS_PRINCIPAL_CLASS = UnixPrincipal.class;
}
+ }
+
+ /**
+ * A JAAS configuration that defines the login modules that we want
+ * to use for login.
+ */
+ private static class HadoopConfiguration
+ extends javax.security.auth.login.Configuration {
+ private static final String SIMPLE_CONFIG_NAME = "hadoop-simple";
+ private static final String USER_KERBEROS_CONFIG_NAME =
+ "hadoop-user-kerberos";
+ private static final String KEYTAB_KERBEROS_CONFIG_NAME =
+ "hadoop-keytab-kerberos";
- Set<UserGroupInformation> ugiPrincipals =
- user.getPrincipals(UserGroupInformation.class);
+ private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
+ new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
+ LoginModuleControlFlag.REQUIRED,
+ new HashMap<String,String>());
+ private static final AppConfigurationEntry HADOOP_LOGIN =
+ new AppConfigurationEntry(HadoopLoginModule.class.getName(),
+ LoginModuleControlFlag.REQUIRED,
+ new HashMap<String,String>());
+ private static final Map<String,String> USER_KERBEROS_OPTIONS =
+ new HashMap<String,String>();
+ static {
+ USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+ USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
+ }
+ private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
+ new AppConfigurationEntry(Krb5LoginModule.class.getName(),
+ LoginModuleControlFlag.OPTIONAL,
+ USER_KERBEROS_OPTIONS);
+ private static final Map<String,String> KEYTAB_KERBEROS_OPTIONS =
+ new HashMap<String,String>();
+ static {
+ KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+ KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
+ KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
+ }
+ private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
+ new AppConfigurationEntry(Krb5LoginModule.class.getName(),
+ LoginModuleControlFlag.REQUIRED,
+ KEYTAB_KERBEROS_OPTIONS);
- UserGroupInformation ugi = null;
- if (ugiPrincipals != null && ugiPrincipals.size() == 1) {
- ugi = ugiPrincipals.iterator().next();
- if (ugi == null) {
- throw new RuntimeException("Cannot find _current user_ UGI in the Subject!");
+ private static final AppConfigurationEntry[] SIMPLE_CONF =
+ new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HADOOP_LOGIN};
+
+ private static final AppConfigurationEntry[] USER_KERBEROS_CONF =
+ new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN,
+ HADOOP_LOGIN};
+
+ private static final AppConfigurationEntry[] KEYTAB_KERBEROS_CONF =
+ new AppConfigurationEntry[]{KEYTAB_KERBEROS_LOGIN, HADOOP_LOGIN};
+
+ @Override
+ public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
+ if (SIMPLE_CONFIG_NAME.equals(appName)) {
+ return SIMPLE_CONF;
+ } else if (USER_KERBEROS_CONFIG_NAME.equals(appName)) {
+ return USER_KERBEROS_CONF;
+ } else if (KEYTAB_KERBEROS_CONFIG_NAME.equals(appName)) {
+ KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
+ KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+ return KEYTAB_KERBEROS_CONF;
}
- } else {
- throw new RuntimeException("Cannot resolve current user from subject, " +
- "which had " + ugiPrincipals.size() +
- " UGI principals!");
+ return null;
}
- return ugi;
}
- /**
- * Set the {@link UserGroupInformation} for the current thread
- * @deprecated Use {@link #setCurrentUser(UserGroupInformation)}
- */
- @Deprecated
- public static void setCurrentUGI(UserGroupInformation ugi) {
- setCurrentUser(ugi);
+ /**
+ * Create a UserGroupInformation for the given subject.
+ * This does not change the subject or acquire new credentials.
+ * @param subject the user's subject
+ */
+ UserGroupInformation(Subject subject) {
+ this.subject = subject;
}
/**
- * Return the current user <code>Subject</code>.
- * @return the current user <code>Subject</code>
+ * Return the current user, including any doAs in the current stack.
+ * @return the current user
+ * @throws IOException if login fails
*/
- static Subject getCurrentUser() {
- return Subject.getSubject(AccessController.getContext());
+ public static UserGroupInformation getCurrentUser() throws IOException {
+ AccessControlContext context = AccessController.getContext();
+ Subject subject = Subject.getSubject(context);
+ return subject == null ? getLoginUser() : new UserGroupInformation(subject);
}
-
+
/**
- * Set the {@link UserGroupInformation} for the current thread
- * WARNING - This method should be used only in test cases and other exceptional
- * cases!
- * @param ugi {@link UserGroupInformation} for the current thread
+ * Get the currently logged in user.
+ * @return the logged in user
+ * @throws IOException if login fails
*/
- public static void setCurrentUser(UserGroupInformation ugi) {
- Subject user = SecurityUtil.getSubject(ugi);
- currentUser.set(user);
+ public synchronized
+ static UserGroupInformation getLoginUser() throws IOException {
+ if (loginUser == null) {
+ try {
+ LoginContext login;
+ if (isSecurityEnabled()) {
+ login = new LoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME);
+ } else {
+ login = new LoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME);
+ }
+ login.login();
+ loginUser = new UserGroupInformation(login.getSubject());
+ } catch (LoginException le) {
+ throw new IOException("failure to login", le);
+ }
+ }
+ return loginUser;
+ }
+
+ /**
+ * Log a user in from a keytab file. Loads a user identity from a keytab
+ * file and logs them in. They become the currently logged-in user.
+ * @param user the principal name to load from the keytab
+ * @param path the path to the keytab file
+ * @throws IOException if the keytab file can't be read
+ */
+ public synchronized
+ static void loginUserFromKeytab(String user,
+ String path
+ ) throws IOException {
+ if (!isSecurityEnabled())
+ return;
+
+ keytabFile = path;
+ keytabPrincipal = user;
+ try {
+ LoginContext login =
+ new LoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME);
+ login.login();
+ loginUser = new UserGroupInformation(login.getSubject());
+ } catch (LoginException le) {
+ throw new IOException("Login failure for " + user + " from keytab " +
+ path, le);
+ }
+ }
+
+ /**
+ * Create a user from a login name. It is intended to be used for remote
+ * users in RPC, since it won't have any credentials.
+ * @param user the full user principal name, must not be empty or null
+ * @return the UserGroupInformation for the remote user.
+ * @throws IllegalArgumentException if user is null or empty
+ */
+ public static UserGroupInformation createRemoteUser(String user) {
+ if (user == null || "".equals(user)) {
+ throw new IllegalArgumentException("Null user");
+ }
+ // a bare Subject holding only a User principal: no tickets, no tokens
+ Subject subject = new Subject();
+ subject.getPrincipals().add(new User(user));
+ return new UserGroupInformation(subject);
}
- /** Get username
+ /**
+ * This class is used for storing the groups for testing. It stores a local
+ * map that has the translation of usernames to groups.
+ */
+ private static class TestingGroups extends Groups {
+ // test-only user -> groups mapping, populated via setUserGroups()
+ private final Map<String, List<String>> userToGroupsMapping =
+ new HashMap<String,List<String>>();
+
+ private TestingGroups() {
+ super(new org.apache.hadoop.conf.Configuration());
+ }
+
+ @Override
+ public List<String> getGroups(String user) {
+ List<String> result = userToGroupsMapping.get(user);
+ if (result == null) {
+ // unknown users resolve to an empty group list rather than null
+ result = new ArrayList<String>();
+ }
+ return result;
+ }
+
+ private void setUserGroups(String user, String[] groups) {
+ userToGroupsMapping.put(user, Arrays.asList(groups));
+ }
+ }
+
+ /**
+ * Create a UGI for testing HDFS and MapReduce
+ * @param user the full user principal name
+ * @param userGroups the names of the groups that the user belongs to
+ * @return a fake user for running unit tests
+ */
+ public static UserGroupInformation createUserForTesting(String user,
+ String[] userGroups) {
+ ensureInitialized();
+ UserGroupInformation ugi = createRemoteUser(user);
+ // make sure that the testing object is setup
+ // NOTE(review): this replaces the static groups resolver for ALL UGIs in
+ // the JVM, so subsequent group lookups see only the testing map
+ if (!(groups instanceof TestingGroups)) {
+ groups = new TestingGroups();
+ }
+ // add the user groups
+ ((TestingGroups) groups).setUserGroups(ugi.getShortUserName(), userGroups);
+ return ugi;
+ }
+
+ /**
+ * Get the user's login name.
+ * @return the user's name up to the first '/' or '@', or null if the
+ * subject carries no User principal
+ */
+ public String getShortUserName() {
+ // return the name of the first User principal found, if any
+ for (User p: subject.getPrincipals(User.class)) {
+ return p.getShortName();
+ }
+ return null;
+ }
+
+ /**
+ * Get the user's full principal name.
+ * @return the user's full principal name, or null if the subject carries
+ * no User principal
+ */
+ public String getUserName() {
+ for (User p: subject.getPrincipals(User.class)) {
+ return p.getName();
+ }
+ return null;
+ }
+
+ /**
+ * Add a token to this UGI
*
- * @return the user's name
+ * @param token Token to be added
+ * @return true on successful add of new token
*/
- public abstract String getUserName();
+ public synchronized boolean addToken(Token<? extends TokenIdentifier> token) {
+ return tokens.add(token);
+ }
- /** Get the name of the groups that the user belong to
+ /**
+ * Obtain the collection of tokens associated with this user.
*
- * @return an array of group names
+ * @return an unmodifiable collection of tokens associated with user
*/
- public abstract String[] getGroupNames();
+ public synchronized Collection<Token<? extends TokenIdentifier>> getTokens() {
+ return Collections.unmodifiableSet(tokens);
+ }
- /** Login and return a UserGroupInformation object. */
- public static UserGroupInformation login(Configuration conf
- ) throws LoginException {
- if (LOGIN_UGI == null) {
- LOGIN_UGI = UnixUserGroupInformation.login(conf);
+ /**
+ * Get the group names for this user.
+ * @return the list of users with the primary group first. If the command
+ * fails, it returns an empty list.
+ */
+ public synchronized String[] getGroupNames() {
+ ensureInitialized();
+ try {
+ List<String> result = groups.getGroups(getShortUserName());
+ return result.toArray(new String[result.size()]);
+ } catch (IOException ie) {
+ // best-effort: a failed group lookup degrades to "no groups"
+ LOG.warn("No groups available for user " + getShortUserName());
+ return new String[0];
+ }
+ }
+
+ /**
+ * Return the username.
+ */
+ @Override
+ public String toString() {
+ return getUserName();
+ }
+
+ /**
+ * Compare the subjects to see if they are equal to each other.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ } else if (o == null || getClass() != o.getClass()) {
+ return false;
+ } else {
+ // delegate equality to the wrapped Subject
+ return subject.equals(((UserGroupInformation) o).subject);
}
- return LOGIN_UGI;
}
- /** Read a {@link UserGroupInformation} from conf */
- public static UserGroupInformation readFrom(Configuration conf
- ) throws IOException {
+ /**
+ * Return the hash of the subject.
+ */
+ @Override
+ public int hashCode() {
+ return subject.hashCode();
+ }
+
+ /**
+ * Get the underlying subject from this ugi.
+ * @return the subject that represents this user.
+ */
+ protected Subject getSubject() {
+ return subject;
+ }
+
+ /**
+ * Run the given action as the user.
+ * @param <T> the return type of the run method
+ * @param action the method to execute
+ * @return the value from the run method
+ */
+ public <T> T doAs(PrivilegedAction<T> action) {
+ // execute with this user's Subject bound to the access-control context
+ return Subject.doAs(subject, action);
+ }
+
+ /**
+ * Run the given action as the user, potentially throwing an exception.
+ * @param <T> the return type of the run method
+ * @param action the method to execute
+ * @return the value from the run method
+ * @throws IOException if the action throws an IOException
+ * @throws Error if the action throws an Error
+ * @throws RuntimeException if the action throws a RuntimeException
+ * @throws InterruptedException if the action throws an InterruptedException
+ * @throws UndeclaredThrowableException if the action throws something else
+ */
+ public <T> T doAs(PrivilegedExceptionAction<T> action
+ ) throws IOException, InterruptedException {
try {
- return UnixUserGroupInformation.readFromConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME);
- } catch (LoginException e) {
- throw (IOException)new IOException().initCause(e);
+ return Subject.doAs(subject, action);
+ } catch (PrivilegedActionException pae) {
+ // unwrap the checked-exception wrapper and rethrow the real cause
+ Throwable cause = pae.getCause();
+ if (cause instanceof IOException) {
+ throw (IOException) cause;
+ } else if (cause instanceof Error) {
+ throw (Error) cause;
+ } else if (cause instanceof RuntimeException) {
+ throw (RuntimeException) cause;
+ } else if (cause instanceof InterruptedException) {
+ throw (InterruptedException) cause;
+ } else {
+ throw new UndeclaredThrowableException(pae,"Unknown exception in doAs");
+ }
+ }
+ }
+
+ // Diagnostic dump of this UGI's name and groups, used by main().
+ // NOTE(review): "Group Ids: " is printed with no values following it --
+ // looks vestigial from the pre-rewrite Unix UGI; confirm before relying on
+ // this output format.
+ private void print() throws IOException {
+ System.out.println("User: " + getUserName());
+ System.out.print("Group Ids: ");
+ System.out.println();
+ String[] groups = getGroupNames();
+ System.out.print("Groups: ");
+ for(int i=0; i < groups.length; i++) {
+ System.out.print(groups[i] + " ");
+ }
+ System.out.println();
+ }
+
+ /**
+ * A test method to print out the current user's UGI.
+ * @param args if there are two arguments, read the user from the keytab
+ * and print it out.
+ * @throws Exception
+ */
+ public static void main(String [] args) throws Exception {
+ System.out.println("Getting UGI for current user");
+ UserGroupInformation ugi = getCurrentUser();
+ ugi.print();
+ System.out.println("UGI: " + ugi);
+ System.out.println("============================================================");
+
+ if (args.length == 2) {
+ System.out.println("Getting UGI from keytab....");
+ loginUserFromKeytab(args[0], args[1]);
+ getCurrentUser().print();
+ // NOTE(review): "Keytab: " prints the ugi captured BEFORE the keytab
+ // login, not the freshly logged-in user -- verify this is intended
+ System.out.println("Keytab: " + ugi);
}
}
}
Added: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/AccessControlList.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/AccessControlList.java?rev=1077137&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/AccessControlList.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/AccessControlList.java Fri Mar 4 03:44:54 2011
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.security.authorize;
+
+import java.util.Iterator;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * Class representing a configured access control list.
+ */
+public class AccessControlList {
+
+ // Indicates an ACL string that represents access to all users
+ public static final String WILDCARD_ACL_VALUE = "*";
+
+ // Set of users who are granted access.
+ private Set<String> users;
+ // Set of groups which are granted access
+ private Set<String> groups;
+ // Whether all users are granted access.
+ private boolean allAllowed;
+
+ /**
+ * Construct a new ACL from a String representation of the same.
+ *
+ * The String is a comma separated list of users and groups.
+ * The user list comes first and is separated by a space followed
+ * by the group list. For e.g. "user1,user2 group1,group2"
+ *
+ * @param aclString String representation of the ACL
+ */
+ public AccessControlList(String aclString) {
+ users = new TreeSet<String>();
+ groups = new TreeSet<String>();
+ // wildcard only when the whole (trimmed) string is "*"; the contains()
+ // test is redundant given the trim().equals() check that follows
+ if (aclString.contains(WILDCARD_ACL_VALUE) &&
+ aclString.trim().equals(WILDCARD_ACL_VALUE)) {
+ allAllowed = true;
+ } else {
+ // split into at most two parts: "users" and everything after the
+ // first space is "groups"
+ String[] userGroupStrings = aclString.split(" ", 2);
+
+ if (userGroupStrings.length >= 1) {
+ String[] usersStr = userGroupStrings[0].split(",");
+ if (usersStr.length >= 1) {
+ addToSet(users, usersStr);
+ }
+ }
+
+ if (userGroupStrings.length == 2) {
+ String[] groupsStr = userGroupStrings[1].split(",");
+ if (groupsStr.length >= 1) {
+ addToSet(groups, groupsStr);
+ }
+ }
+ }
+ }
+
+ /** @return true if the ACL grants access to all users ("*"). */
+ public boolean isAllAllowed() {
+ return allAllowed;
+ }
+
+ /**
+ * Get the names of users allowed for this service.
+ * @return the set of user names. the set must not be modified.
+ */
+ Set<String> getUsers() {
+ return users;
+ }
+
+ /**
+ * Get the names of user groups allowed for this service.
+ * @return the set of group names. the set must not be modified.
+ */
+ Set<String> getGroups() {
+ return groups;
+ }
+
+ /**
+ * Check whether the given user is granted access, either directly, via
+ * the wildcard, or through membership in an allowed group.
+ * @param ugi the user to check
+ * @return true if the user is allowed
+ */
+ public boolean isUserAllowed(UserGroupInformation ugi) {
+ if (allAllowed || users.contains(ugi.getUserName())) {
+ return true;
+ } else {
+ for(String group: ugi.getGroupNames()) {
+ if (groups.contains(group)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ // Add each trimmed, non-empty string to the set.
+ private static final void addToSet(Set<String> set, String[] strings) {
+ for (String s : strings) {
+ s = s.trim();
+ if (s.length() > 0) {
+ set.add(s);
+ }
+ }
+ }
+
+ /**
+ * Render the ACL back in the "user1,user2 group1,group2" form accepted
+ * by the constructor.
+ */
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ boolean first = true;
+ for(String user: users) {
+ if (!first) {
+ sb.append(",");
+ } else {
+ first = false;
+ }
+ sb.append(user);
+ }
+ if (!groups.isEmpty()) {
+ sb.append(" ");
+ }
+ first = true;
+ for(String group: groups) {
+ if (!first) {
+ sb.append(",");
+ } else {
+ first = false;
+ }
+ sb.append(group);
+ }
+ return sb.toString();
+ }
+}
\ No newline at end of file
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/Service.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/Service.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/Service.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/Service.java Fri Mar 4 03:44:54 2011
@@ -28,11 +28,11 @@ import java.security.Permission;
*/
public class Service {
private String key;
- private Permission permission;
+ private Class<?> protocol;
public Service(String key, Class<?> protocol) {
this.key = key;
- this.permission = new ConnectionPermission(protocol);
+ this.protocol = protocol;
}
/**
@@ -44,10 +44,10 @@ public class Service {
}
/**
- * Get the {@link Permission} required to access the service.
- * @return the {@link Permission} required to access the service
+ * Get the protocol for the service
+ * @return the {@link Class} for the protocol
*/
- public Permission getPermission() {
- return permission;
+ public Class<?> getProtocol() {
+ return protocol;
}
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1077137&r1=1077136&r2=1077137&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Fri Mar 4 03:44:54 2011
@@ -17,19 +17,10 @@
*/
package org.apache.hadoop.security.authorize;
-import java.security.AccessControlException;
-import java.security.AccessController;
-import java.security.Permission;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import java.util.Collections;
-import java.util.HashMap;
+import java.util.IdentityHashMap;
import java.util.Map;
-import javax.security.auth.Subject;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
/**
@@ -37,9 +28,10 @@ import org.apache.hadoop.security.UserGr
* for incoming service requests.
*/
public class ServiceAuthorizationManager {
+ private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml";
- private static final Log LOG =
- LogFactory.getLog(ServiceAuthorizationManager.class);
+ private static Map<Class<?>, AccessControlList> protocolToAcl =
+ new IdentityHashMap<Class<?>, AccessControlList>();
/**
* Configuration key for controlling service-level authorization for Hadoop.
@@ -47,9 +39,6 @@ public class ServiceAuthorizationManager
public static final String SERVICE_AUTHORIZATION_CONFIG =
"hadoop.security.authorization";
- private static Map<Class<?>, Permission> protocolToPermissionMap =
- Collections.synchronizedMap(new HashMap<Class<?>, Permission>());
-
/**
* Authorize the user to access the protocol being used.
*
@@ -57,49 +46,48 @@ public class ServiceAuthorizationManager
* @param protocol service being accessed
* @throws AuthorizationException on authorization failure
*/
- public static void authorize(Subject user, Class<?> protocol)
- throws AuthorizationException {
- Permission permission = protocolToPermissionMap.get(protocol);
- if (permission == null) {
- permission = new ConnectionPermission(protocol);
- protocolToPermissionMap.put(protocol, permission);
+ // Authorize the user against the ACL registered for the protocol; throws
+ // AuthorizationException for unknown protocols or disallowed users.
+ // NOTE(review): protocolToAcl is read here without synchronization while
+ // refresh() replaces it under a lock -- verify the reference swap is
+ // safely published to reader threads.
+ public static void authorize(UserGroupInformation user,
+ Class<?> protocol
+ ) throws AuthorizationException {
+ AccessControlList acl = protocolToAcl.get(protocol);
+ if (acl == null) {
+ throw new AuthorizationException("Protocol " + protocol +
+ " is not known.");
+ }
+ if (!acl.isUserAllowed(user)) {
+ throw new AuthorizationException("User " + user.toString() +
+ " is not authorized for protocol " +
+ protocol);
+ }
-
- checkPermission(user, permission);
}
-
- /**
- * Check if the given {@link Subject} has all of necessary {@link Permission}
- * set.
- *
- * @param user <code>Subject</code> to be authorized
- * @param permissions <code>Permission</code> set
- * @throws AuthorizationException if the authorization failed
- */
- private static void checkPermission(final Subject user,
- final Permission... permissions)
- throws AuthorizationException {
- try{
- Subject.doAs(user,
- new PrivilegedExceptionAction<Void>() {
- @Override
- public Void run() throws Exception {
- try {
- for(Permission permission : permissions) {
- AccessController.checkPermission(permission);
- }
- } catch (AccessControlException ace) {
- LOG.info("Authorization failed for " +
- UserGroupInformation.getCurrentUGI(), ace);
- throw new AuthorizationException(ace);
- }
- return null;
- }
- }
- );
- } catch (PrivilegedActionException e) {
- throw new AuthorizationException(e.getException());
+
+ // Reload the per-protocol ACLs from the policy file named by the
+ // 'hadoop.policy.file' system property (default hadoop-policy.xml),
+ // building a fresh map and swapping it in atomically by reference.
+ public static synchronized void refresh(Configuration conf,
+ PolicyProvider provider) {
+ // Get the system property 'hadoop.policy.file'
+ String policyFile =
+ System.getProperty("hadoop.policy.file", HADOOP_POLICY_FILE);
+
+ // Make a copy of the original config, and load the policy file
+ Configuration policyConf = new Configuration(conf);
+ policyConf.addResource(policyFile);
+
+ final Map<Class<?>, AccessControlList> newAcls =
+ new IdentityHashMap<Class<?>, AccessControlList>();
+
+ // Parse the config file
+ Service[] services = provider.getServices();
+ if (services != null) {
+ for (Service service : services) {
+ // missing keys default to the wildcard ACL (allow everyone)
+ AccessControlList acl =
+ new AccessControlList(
+ policyConf.get(service.getServiceKey(),
+ AccessControlList.WILDCARD_ACL_VALUE)
+ );
+ newAcls.put(service.getProtocol(), acl);
+ }
}
+
+ // Flip to the newly parsed permissions
+ protocolToAcl = newAcls;
}
-
}