Posted to hdfs-commits@hadoop.apache.org by om...@apache.org on 2010/01/27 09:21:01 UTC
svn commit: r903562 [1/2] - in /hadoop/hdfs/trunk: ./ src/contrib/hdfsproxy/ src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/ha...
Author: omalley
Date: Wed Jan 27 08:20:58 2010
New Revision: 903562
URL: http://svn.apache.org/viewvc?rev=903562&view=rev
Log:
HDFS-905. Use the new UserGroupInformation from HADOOP-6299.
(jghoman via omalley)
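In short: the patch drops the old UnixUserGroupInformation login-and-serialize flow in favor of the new API. Daemons and clients take UserGroupInformation.getCurrentUser(), servlets wrap the authenticated remote user name with createRemoteUser(), and privileged work (such as creating a DFSClient or NameNode proxy) runs under ugi.doAs(). A minimal sketch of that pattern, assuming a hypothetical caller (the class and method names below are illustrative, not from this patch):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.security.UserGroupInformation;

    public class UgiPatternSketch {
      // Old flow (removed): UnixUserGroupInformation.login(conf, true), with
      // the ugi serialized into the Configuration or the query string.
      // New flow: wrap the authenticated remote user name and run privileged
      // work under doAs(), so RPC proxies created inside pick up that identity.
      public static String whoAmI(String remoteUser)
          throws IOException, InterruptedException {
        UserGroupInformation ugi =
            UserGroupInformation.createRemoteUser(remoteUser);
        return ugi.doAs(new PrivilegedExceptionAction<String>() {
          @Override
          public String run() throws IOException {
            // Inside run(), getCurrentUser() resolves to the doAs() subject.
            return UserGroupInformation.getCurrentUser().getUserName();
          }
        });
      }
    }

The same doAs() wrapping appears below in ProxyStreamFile, ContentSummaryServlet, FileChecksumServlets, FileDataServlet, and FsckServlet.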
Removed:
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUgiManager.java
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUgiManager.java
Modified:
hadoop/hdfs/trunk/CHANGES.txt
hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/AccessTokenHandler.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
hadoop/hdfs/trunk/src/test/hdfs-site.xml
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsCreateMkdir.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsPermission.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/permission/TestStickyBit.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/AppendTestUtil.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFileAppend2.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCreation.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestLeaseRecovery2.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestQuota.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/security/TestGroupMappingServiceRefresh.java
hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/security/TestPermission.java
Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Wed Jan 27 08:20:58 2010
@@ -4,6 +4,9 @@
INCOMPATIBLE CHANGES
+ HDFS-905. Use the new UserGroupInformation from HADOOP-6299.
+ (jghoman via omalley)
+
NEW FEATURES
HDFS-654. Add support new atomic rename functionality in HDFS for
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/build.xml Wed Jan 27 08:20:58 2010
@@ -238,7 +238,7 @@
<sysproperty key="javax.net.ssl.keyStorePassword" value="changeme"/>
<sysproperty key="javax.net.ssl.keyPassword" value="changeme"/>
<sysproperty key="javax.net.ssl.clientCert" value="${ssl.client.cert}"/>
- <formatter type="xml" />
+ <formatter type="plain" />
<batchtest todir="${test.build.dir}" unless="testcase">
<fileset dir="${src.test}">
<include name="**/TestHdfsProxy.java"/>
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/LdapIpDirFilter.java Wed Jan 27 08:20:58 2010
@@ -46,7 +46,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -195,8 +195,7 @@
"User not authorized to access path");
return;
}
- UnixUserGroupInformation ugi = new UnixUserGroupInformation(userId,
- groupName.split(","));
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userId);
rqst.setAttribute("authorized.ugi", ugi);
// since we cannot pass ugi object cross context as they are from different
// classloaders in different war file, we have to use String attribute.
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java Wed Jan 27 08:20:58 2010
@@ -29,7 +29,7 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.server.namenode.FileDataServlet;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -49,28 +49,19 @@
/** {@inheritDoc} */
@Override
- protected URI createUri(FileStatus i, UnixUserGroupInformation ugi,
+ protected URI createUri(FileStatus i, UserGroupInformation ugi,
ClientProtocol nnproxy, HttpServletRequest request) throws IOException,
URISyntaxException {
return new URI(request.getScheme(), null, request.getServerName(), request
.getServerPort(), "/streamFile", "filename=" + i.getPath() + "&ugi="
- + ugi, null);
+ + ugi.getUserName(), null);
}
/** {@inheritDoc} */
@Override
- protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
+ protected UserGroupInformation getUGI(HttpServletRequest request) {
String userID = (String) request
.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
- String groupName = (String) request
- .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
- UnixUserGroupInformation ugi;
- if (groupName != null) {
- // get group info from ldap
- ugi = new UnixUserGroupInformation(userID, groupName.split(","));
- } else {// stronger ugi management
- ugi = ProxyUgiManager.getUgiForUser(userID);
- }
- return ugi;
+ return UserGroupInformation.createRemoteUser(userID);
}
}
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileForward.java Wed Jan 27 08:20:58 2010
@@ -19,7 +19,7 @@
import javax.servlet.http.HttpServletRequest;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
public class ProxyFileForward extends ProxyForwardServlet {
@@ -31,9 +31,10 @@
protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
String path = "/streamFile";
path += "?filename=" + request.getPathInfo();
- UnixUserGroupInformation ugi = (UnixUserGroupInformation)request.getAttribute("authorized.ugi");
+ UserGroupInformation ugi =
+ (UserGroupInformation)request.getAttribute("authorized.ugi");
if (ugi != null) {
- path += "&ugi=" + ugi.toString();
+ path += "&ugi=" + ugi.getUserName();
}
return path;
}
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFilter.java Wed Jan 27 08:20:58 2010
@@ -47,7 +47,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -58,9 +58,6 @@
/** Pattern for triggering reload of user permissions */
protected static final Pattern RELOAD_PATTERN = Pattern
.compile("^(/reloadPermFiles)$");
- /** Pattern for triggering clearing of ugi Cache */
- protected static final Pattern CLEAR_PATTERN = Pattern
- .compile("^(/clearUgiCache)$");
/** Pattern for a filter to find out if a request is HFTP/HSFTP request */
protected static final Pattern HFTP_PATTERN = Pattern
.compile("^(/listPaths|/data|/streamFile|/file)$");
@@ -301,12 +298,6 @@
LOG.info("User permissions and user certs files reloaded");
rsp.setStatus(HttpServletResponse.SC_OK);
return;
- } else if (CLEAR_PATTERN.matcher(servletPath).matches()
- && checkUser("Admin", certs[0])) {
- ProxyUgiManager.clearCache();
- LOG.info("Ugi cache cleared");
- rsp.setStatus(HttpServletResponse.SC_OK);
- return;
}
if (!isAuthorized) {
@@ -315,25 +306,14 @@
}
// request is authorized, set ugi for servlets
- UnixUserGroupInformation ugi = ProxyUgiManager
- .getUgiForUser(userID);
- if (ugi == null) {
- LOG.info("Can't retrieve ugi for user " + userID);
- rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
- "Can't retrieve ugi for user " + userID);
- return;
- }
+ UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userID);
rqst.setAttribute("authorized.ugi", ugi);
rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", userID);
} else if(rqst.getScheme().equalsIgnoreCase("http")) { // http request, set ugi for servlets, only for testing purposes
String ugi = rqst.getParameter("ugi");
if (ugi != null) {
- rqst.setAttribute("authorized.ugi", new UnixUserGroupInformation(ugi
- .split(",")));
- String[] ugiStr = ugi.split(",");
- if(ugiStr.length > 0) {
- rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", ugiStr[0]);
- }
+ rqst.setAttribute("authorized.ugi", UserGroupInformation.createRemoteUser(ugi));
+ rqst.setAttribute("org.apache.hadoop.hdfsproxy.authorized.userID", ugi);
}
}
chain.doFilter(request, response);
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Wed Jan 27 08:20:58 2010
@@ -24,7 +24,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
/** {@inheritDoc} */
public class ProxyListPathsServlet extends ListPathsServlet {
@@ -42,18 +42,9 @@
/** {@inheritDoc} */
@Override
- protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
+ protected UserGroupInformation getUGI(HttpServletRequest request) {
String userID = (String) request
.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
- String groupName = (String) request
- .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
- UnixUserGroupInformation ugi;
- if (groupName != null) {
- // group info stored in ldap
- ugi = new UnixUserGroupInformation(userID, groupName.split(","));
- } else {// stronger ugi management
- ugi = ProxyUgiManager.getUgiForUser(userID);
- }
- return ugi;
+ return UserGroupInformation.createRemoteUser(userID);
}
}
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Wed Jan 27 08:20:58 2010
@@ -19,6 +19,7 @@
import java.io.IOException;
import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
@@ -28,7 +29,7 @@
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.namenode.StreamFile;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
/** {@inheritDoc} */
public class ProxyStreamFile extends StreamFile {
@@ -47,32 +48,31 @@
/** {@inheritDoc} */
@Override
protected DFSClient getDFSClient(HttpServletRequest request)
- throws IOException {
+ throws IOException, InterruptedException {
ServletContext context = getServletContext();
- Configuration conf = new HdfsConfiguration((Configuration) context
+ final Configuration conf = new HdfsConfiguration((Configuration) context
.getAttribute("name.conf"));
- UnixUserGroupInformation.saveToConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME, getUGI(request));
- InetSocketAddress nameNodeAddr = (InetSocketAddress) context
+ final InetSocketAddress nameNodeAddr = (InetSocketAddress) context
.getAttribute("name.node.address");
- return new DFSClient(nameNodeAddr, conf);
+
+ DFSClient client =
+ getUGI(request).doAs(new PrivilegedExceptionAction<DFSClient>() {
+ @Override
+ public DFSClient run() throws IOException {
+ return new DFSClient(nameNodeAddr, conf);
+ }
+ });
+
+ return client;
}
/** {@inheritDoc} */
@Override
- protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
+ protected UserGroupInformation getUGI(HttpServletRequest request) {
String userID = (String) request
.getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
- String groupName = (String) request
- .getAttribute("org.apache.hadoop.hdfsproxy.authorized.role");
- UnixUserGroupInformation ugi;
- if (groupName != null) {
- // get group info from ldap
- ugi = new UnixUserGroupInformation(userID, groupName.split(","));
- } else {// stronger ugi management
- ugi = ProxyUgiManager.getUgiForUser(userID);
- }
- return ugi;
+
+ return UserGroupInformation.createRemoteUser(userID);
}
}
Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java Wed Jan 27 08:20:58 2010
@@ -64,7 +64,7 @@
// warning
private static enum UtilityOption {
- RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get"), CHECKCERTS(
+ RELOAD("-reloadPermFiles"), GET("-get"), CHECKCERTS(
"-checkcerts");
private String name = null;
@@ -303,13 +303,12 @@
public static void main(String[] args) throws Exception {
if (args.length < 1
|| (!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])
- && !UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])
&& !UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && !UtilityOption.CHECKCERTS
.getName().equalsIgnoreCase(args[0]))
|| (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4)
|| (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0]) && args.length != 3)) {
System.err.println("Usage: ProxyUtil [" + UtilityOption.RELOAD.getName()
- + "] | [" + UtilityOption.CLEAR.getName() + "] | ["
+ + "] | ["
+ UtilityOption.GET.getName() + " <hostname> <#port> <path> ] | ["
+ UtilityOption.CHECKCERTS.getName() + " <hostname> <#port> ]");
System.exit(0);
@@ -321,9 +320,6 @@
if (UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])) {
// reload user-certs.xml and user-permissions.xml files
sendCommand(conf, "/reloadPermFiles");
- } else if (UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])) {
- // clear UGI caches
- sendCommand(conf, "/clearUgiCache");
} else if (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0])) {
checkServerCertsExpirationDays(conf, args[1], Integer.parseInt(args[2]));
} else {
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java Wed Jan 27 08:20:58 2010
@@ -110,7 +110,6 @@
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -138,7 +137,7 @@
private static final int TCP_WINDOW_SIZE = 128 * 1024; // 128 KB
private final ClientProtocol namenode;
private final ClientProtocol rpcNamenode;
- final UnixUserGroupInformation ugi;
+ final UserGroupInformation ugi;
volatile boolean clientRunning = true;
private volatile FsServerDefaults serverDefaults;
private volatile long serverDefaultsLastUpdate;
@@ -166,16 +165,13 @@
public static ClientProtocol createNamenode( InetSocketAddress nameNodeAddr,
Configuration conf) throws IOException {
- try {
- return createNamenode(createRPCNamenode(nameNodeAddr, conf,
- UnixUserGroupInformation.login(conf, true)));
- } catch (LoginException e) {
- throw (IOException)(new IOException().initCause(e));
- }
+ return createNamenode(createRPCNamenode(nameNodeAddr, conf,
+ UserGroupInformation.getCurrentUser()));
+
}
private static ClientProtocol createRPCNamenode(InetSocketAddress nameNodeAddr,
- Configuration conf, UnixUserGroupInformation ugi)
+ Configuration conf, UserGroupInformation ugi)
throws IOException {
return (ClientProtocol)RPC.getProxy(ClientProtocol.class,
ClientProtocol.versionID, nameNodeAddr, ugi, conf,
@@ -269,12 +265,8 @@
// The hdfsTimeout is currently the same as the ipc timeout
this.hdfsTimeout = Client.getTimeout(conf);
- try {
- this.ugi = UnixUserGroupInformation.login(conf, true);
- } catch (LoginException e) {
- throw (IOException)(new IOException().initCause(e));
- }
-
+ this.ugi = UserGroupInformation.getCurrentUser();
+
String taskId = conf.get("mapred.task.id");
if (taskId != null) {
this.clientName = "DFSClient_" + taskId;
@@ -1146,7 +1138,6 @@
diskspaceQuota);
}
-
try {
namenode.setQuota(src, namespaceQuota, diskspaceQuota);
} catch(RemoteException re) {
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Wed Jan 27 08:20:58 2010
@@ -190,4 +190,8 @@
public static final String DFS_NAMENODE_PLUGINS_KEY = "dfs.namenode.plugins";
public static final String DFS_WEB_UGI_KEY = "dfs.web.ugi";
public static final String DFS_NAMENODE_STARTUP_KEY = "dfs.namenode.startup";
+ public static final String DFS_DATANODE_KEYTAB_FILE_KEY = "dfs.datanode.keytab.file";
+ public static final String DFS_DATANODE_USER_NAME_KEY = "dfs.datanode.user.name.key";
+ public static final String DFS_NAMENODE_KEYTAB_FILE_KEY = "dfs.namenode.keytab.file";
+ public static final String DFS_NAMENODE_USER_NAME_KEY = "dfs.namenode.user.name.key";
}
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java Wed Jan 27 08:20:58 2010
@@ -18,8 +18,12 @@
package org.apache.hadoop.hdfs;
+import java.io.IOException;
import java.util.StringTokenizer;
+
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
public class DFSUtil {
/**
@@ -76,6 +80,21 @@
simulation[index] = false;
}
}
-
+
+ /**
+ * If a keytab has been provided, login as that user.
+ */
+ public static void login(final Configuration conf,
+ final String keytabFileKey,
+ final String userNameKey)
+ throws IOException {
+ String keytabFilename = conf.get(keytabFileKey);
+
+ if(keytabFilename == null)
+ return;
+
+ String user = conf.get(userNameKey, System.getProperty("user.name"));
+ UserGroupInformation.loginUserFromKeytab(user, keytabFilename);
+ }
}
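The DataNode constructor (below) calls this helper at startup with the new DFSConfigKeys entries; as a rough usage sketch, with a placeholder keytab path and principal that are not part of this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KeytabLoginSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new HdfsConfiguration();
        // Placeholder keytab path and principal, for illustration only:
        conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
            "/etc/hadoop/datanode.keytab");
        conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, "dn/host@REALM");
        UserGroupInformation.setConfiguration(conf);
        // A no-op when no keytab is configured; otherwise logs in from it,
        // with the principal defaulting to System.getProperty("user.name").
        DFSUtil.login(conf,
            DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
            DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY);
      }
    }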
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/HftpFileSystem.java Wed Jan 27 08:20:58 2010
@@ -33,8 +33,6 @@
import java.util.Random;
import java.util.TimeZone;
-import javax.security.auth.login.LoginException;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.CreateFlag;
@@ -48,10 +46,8 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.StringUtils;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
@@ -96,12 +92,7 @@
public void initialize(URI name, Configuration conf) throws IOException {
super.initialize(name, conf);
setConf(conf);
- try {
- this.ugi = UnixUserGroupInformation.login(conf, true);
- } catch (LoginException le) {
- throw new IOException(StringUtils.stringifyException(le));
- }
-
+ this.ugi = UserGroupInformation.getCurrentUser();
nnAddr = NetUtils.createSocketAddr(name.toString());
}
@@ -121,7 +112,7 @@
Construct URL pointing to file on namenode
*/
URL getNamenodeFileURL(Path f) throws IOException {
- return getNamenodeURL("/data" + f.toUri().getPath(), "ugi=" + ugi);
+ return getNamenodeURL("/data" + f.toUri().getPath(), "ugi=" + ugi.getUserName());
}
/*
@@ -156,7 +147,7 @@
@Override
public FSDataInputStream open(Path f, int buffersize) throws IOException {
- URL u = getNamenodeURL("/data" + f.toUri().getPath(), "ugi=" + ugi);
+ URL u = getNamenodeURL("/data" + f.toUri().getPath(), "ugi=" + ugi.getUserName());
return new FSDataInputStream(new ByteRangeInputStream(u));
}
@@ -206,7 +197,7 @@
XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
HttpURLConnection connection = openConnection("/listPaths" + path,
- "ugi=" + ugi + (recur? "&recursive=yes" : ""));
+ "ugi=" + ugi.getUserName() + (recur? "&recursive=yes" : ""));
InputStream resp = connection.getInputStream();
xr.parse(new InputSource(resp));
@@ -270,7 +261,7 @@
private FileChecksum getFileChecksum(String f) throws IOException {
final HttpURLConnection connection = openConnection(
- "/fileChecksum" + f, "ugi=" + ugi);
+ "/fileChecksum" + f, "ugi=" + ugi.getUserName());
try {
final XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/AccessTokenHandler.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/AccessTokenHandler.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/AccessTokenHandler.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/security/AccessTokenHandler.java Wed Jan 27 08:20:58 2010
@@ -220,7 +220,7 @@
/** Generate an access token for current user */
public BlockAccessToken generateToken(long blockID, EnumSet<AccessMode> modes)
throws IOException {
- UserGroupInformation ugi = UserGroupInformation.getCurrentUGI();
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
String userID = (ugi == null ? null : ugi.getUserName());
return generateToken(userID, blockID, modes);
}
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java Wed Jan 27 08:20:58 2010
@@ -25,7 +25,6 @@
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.lang.Class;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
@@ -81,7 +80,6 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.NetworkTopology;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.StringUtils;
@@ -923,11 +921,7 @@
methodNameToPolicyMap.put("getAccessKeys", methodPolicy);
UserGroupInformation ugi;
- try {
- ugi = UnixUserGroupInformation.login(conf);
- } catch (javax.security.auth.login.LoginException e) {
- throw new IOException(StringUtils.stringifyException(e));
- }
+ ugi = UserGroupInformation.getCurrentUser();
return (NamenodeProtocol) RetryProxy.create(
NamenodeProtocol.class,
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Wed Jan 27 08:20:58 2010
@@ -42,26 +42,17 @@
import org.apache.hadoop.hdfs.security.BlockAccessToken;
import org.apache.hadoop.hdfs.server.namenode.DatanodeDescriptor;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.util.VersionInfo;
public class JspHelper {
final static public String WEB_UGI_PROPERTY_NAME = "dfs.web.ugi";
public static final Configuration conf = new HdfsConfiguration();
- public static final UnixUserGroupInformation webUGI
- = UnixUserGroupInformation.createImmutable(
- conf.getStrings(WEB_UGI_PROPERTY_NAME));
-
+
private static final int defaultChunkSizeToView =
conf.getInt("dfs.default.chunk.view.size", 32 * 1024);
static final Random rand = new Random();
- static {
- UnixUserGroupInformation.saveToConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME, webUGI);
- }
-
/** Private constructor for preventing creating JspHelper object. */
private JspHelper() {}
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Wed Jan 27 08:20:58 2010
@@ -85,6 +85,7 @@
import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.io.IOUtils;
@@ -93,9 +94,7 @@
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.DNS;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.authorize.ConfiguredPolicy;
-import org.apache.hadoop.security.authorize.PolicyProvider;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.DiskChecker;
@@ -227,19 +226,25 @@
* Create the DataNode given a configuration and an array of dataDirs.
* 'dataDirs' is where the blocks are stored.
*/
- DataNode(Configuration conf,
- AbstractList<File> dataDirs) throws IOException {
+ DataNode(final Configuration conf,
+ final AbstractList<File> dataDirs) throws IOException {
super(conf);
+
+ UserGroupInformation.setConfiguration(conf);
+ DFSUtil.login(conf,
+ DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
+ DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY);
+
DataNode.setDataNode(this);
+
try {
startDataNode(conf, dataDirs);
} catch (IOException ie) {
shutdown();
- throw ie;
- }
+ throw ie;
+ }
}
-
-
+
/**
* This method starts the data node with the specified conf.
*
@@ -392,13 +397,8 @@
// set service-level authorization security policy
if (conf.getBoolean(
ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG, false)) {
- PolicyProvider policyProvider =
- (PolicyProvider)(ReflectionUtils.newInstance(
- conf.getClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
- HDFSPolicyProvider.class, PolicyProvider.class),
- conf));
- SecurityUtil.setPolicy(new ConfiguredPolicy(conf, policyProvider));
- }
+ ServiceAuthorizationManager.refresh(conf, new HDFSPolicyProvider());
+ }
//init ipc server
InetSocketAddress ipcAddr = NetUtils.createSocketAddr(
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java Wed Jan 27 08:20:58 2010
@@ -19,6 +19,7 @@
import java.io.IOException;
import java.io.PrintWriter;
+import java.security.PrivilegedExceptionAction;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
@@ -27,7 +28,7 @@
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.znerd.xmlenc.XMLOutputter;
/** Servlets for file checksum */
@@ -36,34 +37,44 @@
private static final long serialVersionUID = 1L;
/** {@inheritDoc} */
- public void doGet(HttpServletRequest request, HttpServletResponse response
- ) throws ServletException, IOException {
- final UnixUserGroupInformation ugi = getUGI(request);
- final String path = request.getPathInfo();
-
- final PrintWriter out = response.getWriter();
- final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
- xml.declaration();
+ public void doGet(final HttpServletRequest request,
+ final HttpServletResponse response) throws ServletException, IOException {
+ final UserGroupInformation ugi = getUGI(request);
try {
- //get content summary
- final ClientProtocol nnproxy = createNameNodeProxy(ugi);
- final ContentSummary cs = nnproxy.getContentSummary(path);
-
- //write xml
- xml.startTag(ContentSummary.class.getName());
- if (cs != null) {
- xml.attribute("length" , "" + cs.getLength());
- xml.attribute("fileCount" , "" + cs.getFileCount());
- xml.attribute("directoryCount", "" + cs.getDirectoryCount());
- xml.attribute("quota" , "" + cs.getQuota());
- xml.attribute("spaceConsumed" , "" + cs.getSpaceConsumed());
- xml.attribute("spaceQuota" , "" + cs.getSpaceQuota());
- }
- xml.endTag();
- } catch(IOException ioe) {
- new RemoteException(ioe.getClass().getName(), ioe.getMessage()
- ).writeXml(path, xml);
+ ugi.doAs(new PrivilegedExceptionAction<Object>() {
+ @Override
+ public Object run() throws Exception {
+ final String path = request.getPathInfo();
+
+ final PrintWriter out = response.getWriter();
+ final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
+ xml.declaration();
+ try {
+ //get content summary
+ final ClientProtocol nnproxy = createNameNodeProxy();
+ final ContentSummary cs = nnproxy.getContentSummary(path);
+
+ //write xml
+ xml.startTag(ContentSummary.class.getName());
+ if (cs != null) {
+ xml.attribute("length" , "" + cs.getLength());
+ xml.attribute("fileCount" , "" + cs.getFileCount());
+ xml.attribute("directoryCount", "" + cs.getDirectoryCount());
+ xml.attribute("quota" , "" + cs.getQuota());
+ xml.attribute("spaceConsumed" , "" + cs.getSpaceConsumed());
+ xml.attribute("spaceQuota" , "" + cs.getSpaceQuota());
+ }
+ xml.endTag();
+ } catch(IOException ioe) {
+ new RemoteException(ioe.getClass().getName(), ioe.getMessage()
+ ).writeXml(path, xml);
+ }
+ xml.endDocument();
+ return null;
+ }
+ });
+ } catch (InterruptedException e) {
+ throw new IOException(e);
}
- xml.endDocument();
}
}
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java Wed Jan 27 08:20:58 2010
@@ -36,7 +36,6 @@
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
/**
@@ -48,29 +47,43 @@
static final Log LOG = LogFactory.getLog(DfsServlet.class.getCanonicalName());
- /** Get {@link UserGroupInformation} from request */
- protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
- String ugi = request.getParameter("ugi");
- try {
- return new UnixUserGroupInformation(ugi.split(","));
+ /** Get {@link UserGroupInformation} from request
+ * @throws IOException */
+ protected UserGroupInformation getUGI(HttpServletRequest request)
+ throws IOException {
+ UserGroupInformation u = null;
+ if(UserGroupInformation.isSecurityEnabled()) {
+ String user = request.getRemoteUser();
+ if(user == null)
+ throw new IOException("Security enabled but user not " +
+ "authenticated by filter");
+
+ u = UserGroupInformation.createRemoteUser(user);
+ } else { // Security's not on, pull from url
+ String ugi = request.getParameter("ugi");
+
+ if(ugi == null) // not specified in request
+ ugi = new Configuration().get(JspHelper.WEB_UGI_PROPERTY_NAME);
+
+ if(ugi == null) // not specified in conf either
+ throw new IOException("Cannot determine UGI from request or conf");
+
+ u = UserGroupInformation.createRemoteUser(ugi);
}
- catch(Exception e) {
- LOG.warn("Invalid ugi (= " + ugi + ")");
- }
- return JspHelper.webUGI;
+
+ if(LOG.isDebugEnabled())
+ LOG.debug("getUGI is returning: " + u.getUserName());
+ return u;
}
/**
* Create a {@link NameNode} proxy from the current {@link ServletContext}.
*/
- protected ClientProtocol createNameNodeProxy(UnixUserGroupInformation ugi
- ) throws IOException {
+ protected ClientProtocol createNameNodeProxy() throws IOException {
ServletContext context = getServletContext();
InetSocketAddress nnAddr = (InetSocketAddress)context.getAttribute("name.node.address");
Configuration conf = new HdfsConfiguration(
(Configuration)context.getAttribute("name.conf"));
- UnixUserGroupInformation.saveToConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
return DFSClient.createNamenode(nnAddr, conf);
}
@@ -85,7 +98,7 @@
: host.getInfoPort();
final String filename = request.getPathInfo();
return new URI(scheme, null, hostname, port, servletpath,
- "filename=" + filename + "&ugi=" + ugi, null);
+ "filename=" + filename + "&ugi=" + ugi.getUserName(), null);
}
/** Get filename from the request */
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Wed Jan 27 08:20:58 2010
@@ -33,8 +33,6 @@
import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMBean;
import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMetrics;
import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.PermissionChecker;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -393,11 +391,8 @@
*/
private void setConfigurationParameters(Configuration conf)
throws IOException {
- try {
- fsOwner = UnixUserGroupInformation.login(conf);
- } catch (LoginException e) {
- throw new IOException(StringUtils.stringifyException(e));
- }
+ fsOwner = UserGroupInformation.getCurrentUser();
+
LOG.info("fsOwner=" + fsOwner);
this.supergroup = conf.get(DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_KEY,
@@ -646,7 +641,7 @@
getEditLog().logSync();
if (auditLog.isInfoEnabled()) {
final FileStatus stat = dir.getFileInfo(src);
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"setPermission", src, null, stat);
}
@@ -674,7 +669,7 @@
getEditLog().logSync();
if (auditLog.isInfoEnabled()) {
final FileStatus stat = dir.getFileInfo(src);
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"setOwner", src, null, stat);
}
@@ -718,7 +713,7 @@
final LocatedBlocks ret = getBlockLocationsInternal(src,
offset, length, doAccessTime);
if (auditLog.isInfoEnabled()) {
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"open", src, null, null);
}
@@ -911,7 +906,7 @@
if (auditLog.isInfoEnabled()) {
final FileStatus stat = dir.getFileInfo(target);
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getLoginUser(),
Server.getRemoteIp(),
"concat", Arrays.toString(srcs), target, stat);
}
@@ -938,7 +933,7 @@
dir.setTimes(src, inode, mtime, atime, true);
if (auditLog.isInfoEnabled()) {
final FileStatus stat = dir.getFileInfo(src);
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"setTimes", src, null, stat);
}
@@ -965,7 +960,7 @@
boolean status = setReplicationInternal(src, replication);
getEditLog().logSync();
if (status && auditLog.isInfoEnabled()) {
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"setReplication", src, null, null);
}
@@ -1051,7 +1046,7 @@
getEditLog().logSync();
if (auditLog.isInfoEnabled()) {
final FileStatus stat = dir.getFileInfo(src);
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"create", src, null, stat);
}
@@ -1298,7 +1293,7 @@
}
if (auditLog.isInfoEnabled()) {
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"append", src, null, null);
}
@@ -1606,7 +1601,7 @@
getEditLog().logSync();
if (status && auditLog.isInfoEnabled()) {
final FileStatus stat = dir.getFileInfo(dst);
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"rename", src, dst, stat);
}
@@ -1653,7 +1648,7 @@
cmd.append(option.value()).append(" ");
}
final FileStatus stat = dir.getFileInfo(dst);
- logAuditEvent(UserGroupInformation.getCurrentUGI(), Server.getRemoteIp(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(), Server.getRemoteIp(),
cmd.toString(), src, dst, stat);
}
}
@@ -1693,7 +1688,7 @@
}
boolean status = deleteInternal(src, true);
if (status && auditLog.isInfoEnabled()) {
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"delete", src, null, null);
}
@@ -1793,7 +1788,7 @@
getEditLog().logSync();
if (status && auditLog.isInfoEnabled()) {
final FileStatus stat = dir.getFileInfo(src);
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"mkdirs", src, null, stat);
}
@@ -2153,7 +2148,7 @@
}
}
if (auditLog.isInfoEnabled()) {
- logAuditEvent(UserGroupInformation.getCurrentUGI(),
+ logAuditEvent(UserGroupInformation.getCurrentUser(),
Server.getRemoteIp(),
"listStatus", src, null, null);
}
@@ -3892,7 +3887,7 @@
private void checkSuperuserPrivilege() throws AccessControlException {
if (isPermissionEnabled) {
- PermissionChecker.checkSuperuserPrivilege(fsOwner, supergroup);
+ FSPermissionChecker.checkSuperuserPrivilege(fsOwner, supergroup);
}
}
@@ -4346,7 +4341,7 @@
public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
throws IOException {
- String user = UserGroupInformation.getCurrentUGI().getUserName();
+ String user = UserGroupInformation.getCurrentUser().getUserName();
Text owner = new Text(user);
DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(owner, renewer);
return new Token<DelegationTokenIdentifier>(dtId, dtSecretManager);
@@ -4354,13 +4349,13 @@
public Boolean renewDelegationToken(Token<DelegationTokenIdentifier> token)
throws InvalidToken, IOException {
- String renewer = UserGroupInformation.getCurrentUGI().getUserName();
+ String renewer = UserGroupInformation.getCurrentUser().getUserName();
return dtSecretManager.renewToken(token, renewer);
}
public Boolean cancelDelegationToken(Token<DelegationTokenIdentifier> token)
throws IOException {
- String canceller = UserGroupInformation.getCurrentUGI().getUserName();
+ String canceller = UserGroupInformation.getCurrentUser().getUserName();
return dtSecretManager.cancelToken(token, canceller);
}
}
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java Wed Jan 27 08:20:58 2010
@@ -17,6 +17,10 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
import java.util.Stack;
import org.apache.commons.logging.Log;
@@ -24,19 +28,55 @@
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.PermissionChecker;
import org.apache.hadoop.security.UserGroupInformation;
/** Perform permission checking in {@link FSNamesystem}. */
-class FSPermissionChecker extends PermissionChecker {
+class FSPermissionChecker {
static final Log LOG = LogFactory.getLog(UserGroupInformation.class);
+ private final UserGroupInformation ugi;
+ public final String user;
+ private final Set<String> groups = new HashSet<String>();
+ public final boolean isSuper;
+
FSPermissionChecker(String fsOwner, String supergroup
) throws AccessControlException{
- super(fsOwner, supergroup);
+ try {
+ ugi = UserGroupInformation.getCurrentUser();
+ } catch (IOException e) {
+ throw new AccessControlException(e);
+ }
+
+ groups.addAll(Arrays.asList(ugi.getGroupNames()));
+ user = ugi.getUserName();
+
+ isSuper = user.equals(fsOwner) || groups.contains(supergroup);
}
/**
+ * Check if the callers group contains the required values.
+ * @param group group to check
+ */
+ public boolean containsGroup(String group) {return groups.contains(group);}
+
+ /**
+ * Verify if the caller has the required permission. This will result into
+ * an exception if the caller is not allowed to access the resource.
+ * @param owner owner of the system
+ * @param supergroup supergroup of the system
+ */
+ public static void checkSuperuserPrivilege(UserGroupInformation owner,
+ String supergroup)
+ throws AccessControlException {
+ FSPermissionChecker checker =
+ new FSPermissionChecker(owner.getUserName(), supergroup);
+ if (!checker.isSuper) {
+ throw new AccessControlException("Access denied for user "
+ + checker.user + ". Superuser privilege is required");
+ }
+ }
+
+ /**
* Check whether current user have permissions to access the path.
* Traverse is always checked.
*
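For reference, FSNamesystem.checkSuperuserPrivilege() above now routes through this class; a minimal sketch of the call, using a hypothetical same-package caller (FSPermissionChecker is package-private):

    package org.apache.hadoop.hdfs.server.namenode;

    import org.apache.hadoop.security.AccessControlException;
    import org.apache.hadoop.security.UserGroupInformation;

    // Hypothetical caller mirroring FSNamesystem.checkSuperuserPrivilege(),
    // where fsOwner is the UGI captured via getCurrentUser() at startup.
    class SuperuserCheckSketch {
      static void requireSuperuser(UserGroupInformation fsOwner,
          String supergroup) throws AccessControlException {
        // Throws unless the current caller is fsOwner or in supergroup.
        FSPermissionChecker.checkSuperuserPrivilege(fsOwner, supergroup);
      }
    }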
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java Wed Jan 27 08:20:58 2010
@@ -21,6 +21,7 @@
import java.io.PrintWriter;
import java.net.URI;
import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
import javax.net.SocketFactory;
import javax.servlet.ServletContext;
@@ -39,7 +40,6 @@
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.znerd.xmlenc.XMLOutputter;
@@ -77,7 +77,6 @@
/** {@inheritDoc} */
public void doGet(HttpServletRequest request, HttpServletResponse response
) throws ServletException, IOException {
- final UnixUserGroupInformation ugi = getUGI(request);
final PrintWriter out = response.getWriter();
final String filename = getFilename(request, response);
final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
@@ -86,17 +85,25 @@
final Configuration conf = new HdfsConfiguration(DataNode.getDataNode().getConf());
final int socketTimeout = conf.getInt(DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, HdfsConstants.READ_TIMEOUT);
final SocketFactory socketFactory = NetUtils.getSocketFactory(conf, ClientProtocol.class);
- UnixUserGroupInformation.saveToConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
- final ClientProtocol nnproxy = DFSClient.createNamenode(conf);
-
+
try {
+ ClientProtocol nnproxy = getUGI(request).doAs(new PrivilegedExceptionAction<ClientProtocol>() {
+ @Override
+ public ClientProtocol run() throws IOException {
+ return DFSClient.createNamenode(conf);
+ }
+ });
+
final MD5MD5CRC32FileChecksum checksum = DFSClient.getFileChecksum(
filename, nnproxy, socketFactory, socketTimeout);
MD5MD5CRC32FileChecksum.write(xml, checksum);
} catch(IOException ioe) {
new RemoteException(ioe.getClass().getName(), ioe.getMessage()
).writeXml(filename, xml);
+ } catch (InterruptedException e) {
+ new RemoteException(e.getClass().getName(), e.getMessage()
+ ).writeXml(filename, xml);
+
}
xml.endDocument();
}
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Wed Jan 27 08:20:58 2010
@@ -20,6 +20,8 @@
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -29,7 +31,7 @@
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.common.JspHelper;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
/** Redirect queries about the hosted filesystem to an appropriate datanode.
* @see org.apache.hadoop.hdfs.HftpFileSystem
@@ -39,7 +41,7 @@
private static final long serialVersionUID = 1L;
/** Create a redirection URI */
- protected URI createUri(FileStatus i, UnixUserGroupInformation ugi,
+ protected URI createUri(FileStatus i, UserGroupInformation ugi,
ClientProtocol nnproxy, HttpServletRequest request)
throws IOException, URISyntaxException {
String scheme = request.getScheme();
@@ -54,7 +56,8 @@
"https".equals(scheme)
? (Integer)getServletContext().getAttribute("datanode.https.port")
: host.getInfoPort(),
- "/streamFile", "filename=" + i.getPath() + "&ugi=" + ugi, null);
+ "/streamFile", "filename=" + i.getPath() +
+ "&ugi=" + ugi.getUserName(), null);
}
/** Select a datanode to service this request.
@@ -81,13 +84,20 @@
* }
*/
public void doGet(HttpServletRequest request, HttpServletResponse response)
- throws IOException {
- final UnixUserGroupInformation ugi = getUGI(request);
- final ClientProtocol nnproxy = createNameNodeProxy(ugi);
+ throws IOException {
+ final UserGroupInformation ugi = getUGI(request);
try {
- final String path = request.getPathInfo() != null
- ? request.getPathInfo() : "/";
+ final ClientProtocol nnproxy = ugi
+ .doAs(new PrivilegedExceptionAction<ClientProtocol>() {
+ @Override
+ public ClientProtocol run() throws IOException {
+ return createNameNodeProxy();
+ }
+ });
+
+ final String path = request.getPathInfo() != null ?
+ request.getPathInfo() : "/";
FileStatus info = nnproxy.getFileInfo(path);
if ((info != null) && !info.isDir()) {
response.sendRedirect(createUri(info, ugi, nnproxy,
@@ -101,8 +111,9 @@
response.getWriter().println(e.toString());
} catch (IOException e) {
response.sendError(400, e.getMessage());
+ } catch (InterruptedException e) {
+ response.sendError(400, e.getMessage());
}
}
}
-
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java Wed Jan 27 08:20:58 2010
@@ -19,6 +19,7 @@
import java.io.IOException;
import java.io.PrintWriter;
+import java.security.PrivilegedExceptionAction;
import java.util.Map;
import javax.servlet.ServletContext;
@@ -28,7 +29,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
/**
@@ -45,20 +45,30 @@
final Map<String,String[]> pmap = request.getParameterMap();
final PrintWriter out = response.getWriter();
- final UnixUserGroupInformation ugi = getUGI(request);
- UserGroupInformation.setCurrentUser(ugi);
-
- final ServletContext context = getServletContext();
- final Configuration conf = new HdfsConfiguration((Configuration) context.getAttribute("name.conf"));
- UnixUserGroupInformation.saveToConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
-
- final NameNode nn = (NameNode) context.getAttribute("name.node");
- final FSNamesystem namesystem = nn.getNamesystem();
- final int totalDatanodes = namesystem.getNumberOfDatanodes(DatanodeReportType.LIVE);
- final short minReplication = namesystem.getMinReplication();
-
- new NamenodeFsck(conf, nn, nn.getNetworkTopology(), pmap, out,
- totalDatanodes, minReplication).fsck();
+ final UserGroupInformation ugi = getUGI(request);
+ try {
+ ugi.doAs(new PrivilegedExceptionAction<Object>() {
+ @Override
+ public Object run() throws Exception {
+ final ServletContext context = getServletContext();
+ final Configuration conf =
+ new HdfsConfiguration((Configuration)context.getAttribute("name.conf"));
+
+ NameNode nn = (NameNode) context.getAttribute("name.node");
+
+ final FSNamesystem namesystem = nn.getNamesystem();
+ final int totalDatanodes =
+ namesystem.getNumberOfDatanodes(DatanodeReportType.LIVE);
+ final short minReplication = namesystem.getMinReplication();
+
+ new NamenodeFsck(conf, nn, nn.getNetworkTopology(), pmap, out,
+ totalDatanodes, minReplication).fsck();
+
+ return null;
+ }
+ });
+ } catch (InterruptedException e) {
+ response.sendError(400, e.getMessage());
+ }
}
}
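
FsckServlet shows the no-result variant of the same idiom: the action returns null and the caller discards it. Where nothing needs to be returned, PrivilegedExceptionAction<Void> reads slightly more clearly than <Object>; a hedged sketch, with doPrivilegedWork() as a hypothetical stand-in for the NamenodeFsck body:

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.security.UserGroupInformation;

    public class VoidActionSketch {
      static void doPrivilegedWork() { /* stands in for the fsck body */ }

      public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() {
            doPrivilegedWork();
            return null;  // no result to propagate
          }
        });
      }
    }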
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java Wed Jan 27 08:20:58 2010
@@ -21,13 +21,13 @@
import org.apache.hadoop.hdfs.HftpFileSystem;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.util.VersionInfo;
import org.znerd.xmlenc.*;
import java.io.IOException;
import java.io.PrintWriter;
+import java.security.PrivilegedExceptionAction;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
@@ -125,7 +125,6 @@
*/
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
- final UnixUserGroupInformation ugi = getUGI(request);
final PrintWriter out = response.getWriter();
final XMLOutputter doc = new XMLOutputter(out, "UTF-8");
try {
@@ -134,7 +133,14 @@
final boolean recur = "yes".equals(root.get("recursive"));
final Pattern filter = Pattern.compile(root.get("filter"));
final Pattern exclude = Pattern.compile(root.get("exclude"));
- ClientProtocol nnproxy = createNameNodeProxy(ugi);
+
+ ClientProtocol nnproxy =
+ getUGI(request).doAs(new PrivilegedExceptionAction<ClientProtocol>() {
+ @Override
+ public ClientProtocol run() throws IOException {
+ return createNameNodeProxy();
+ }
+ });
doc.declaration();
doc.startTag("listing");
@@ -173,6 +179,9 @@
if (doc != null) {
doc.endDocument();
}
+ } catch (InterruptedException e) {
+ LOG.warn("ListPathServlet encountered InterruptedException", e);
+ response.sendError(400, e.getMessage());
} finally {
if (out != null) {
out.close();
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Wed Jan 27 08:20:58 2010
@@ -50,7 +50,6 @@
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
import org.apache.hadoop.hdfs.security.ExportedAccessKeys;
-import org.apache.hadoop.security.RefreshUserToGroupMappingsProtocol;
import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
import org.apache.hadoop.hdfs.server.common.HdfsConstants.NamenodeRole;
@@ -68,6 +67,7 @@
import org.apache.hadoop.hdfs.server.protocol.NodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.io.EnumSetWritable;
@@ -78,11 +78,10 @@
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.Groups;
+import org.apache.hadoop.security.RefreshUserToGroupMappingsProtocol;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.ConfiguredPolicy;
-import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.security.token.Token;
@@ -293,12 +292,7 @@
if (serviceAuthEnabled =
conf.getBoolean(
ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG, false)) {
- PolicyProvider policyProvider =
- (PolicyProvider)(ReflectionUtils.newInstance(
- conf.getClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
- HDFSPolicyProvider.class, PolicyProvider.class),
- conf));
- SecurityUtil.setPolicy(new ConfiguredPolicy(conf, policyProvider));
+ ServiceAuthorizationManager.refresh(conf, new HDFSPolicyProvider());
}
// create rpc server
@@ -417,6 +411,11 @@
}
protected NameNode(Configuration conf, NamenodeRole role) throws IOException {
+ UserGroupInformation.setConfiguration(conf);
+ DFSUtil.login(conf,
+ DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
+ DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY);
+
this.role = role;
try {
initialize(conf);
@@ -608,7 +607,7 @@
+ MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
}
namesystem.startFile(src,
- new PermissionStatus(UserGroupInformation.getCurrentUGI().getUserName(),
+ new PermissionStatus(UserGroupInformation.getCurrentUser().getUserName(),
null, masked),
clientName, clientMachine, flag.get(), createParent, replication, blockSize);
myMetrics.numFilesCreated.inc();
@@ -815,7 +814,7 @@
+ MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
}
return namesystem.mkdirs(src,
- new PermissionStatus(UserGroupInformation.getCurrentUGI().getUserName(),
+ new PermissionStatus(UserGroupInformation.getCurrentUser().getUserName(),
null, masked), createParent);
}
@@ -1176,14 +1175,15 @@
throw new AuthorizationException("Service Level Authorization not enabled!");
}
- SecurityUtil.getPolicy().refresh();
+ ServiceAuthorizationManager.refresh(
+ new Configuration(), new HDFSPolicyProvider());
}
@Override
public void refreshUserToGroupsMappings(Configuration conf) throws IOException {
LOG.info("Refreshing all user-to-groups mappings. Requested by user: " +
- UserGroupInformation.getCurrentUGI().getUserName());
- SecurityUtil.getUserToGroupsMappingService(conf).refresh();
+ UserGroupInformation.getCurrentUser().getUserName());
+ Groups.getUserToGroupsMappingService(conf).refresh();
}
private static void printUsage() {
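
The NameNode constructor now sets the UGI configuration and logs in from a keytab before initialize() runs. DFSUtil.login itself is not shown in this hunk; a plausible shape, assuming it simply resolves the two config keys and defers to UserGroupInformation (hypothetical sketch, not the committed body):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    // Hypothetical sketch of a DFSUtil.login-style helper; the committed
    // implementation lives in DFSUtil.java and is not part of this hunk.
    public class LoginSketch {
      public static void login(Configuration conf, String keytabKey,
                               String principalKey) throws IOException {
        String keytab = conf.get(keytabKey);
        String principal = conf.get(principalKey);
        if (keytab != null && principal != null) {
          // Kerberos login for the daemon's configured principal.
          UserGroupInformation.loginUserFromKeytab(principal, keytab);
        }
      }
    }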
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java Wed Jan 27 08:20:58 2010
@@ -21,6 +21,7 @@
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
import java.util.Enumeration;
import java.util.List;
import javax.servlet.ServletException;
@@ -32,7 +33,6 @@
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.mortbay.jetty.InclusiveByteRange;
public class StreamFile extends DfsServlet {
@@ -50,11 +50,18 @@
/** getting a client for connecting to dfs */
protected DFSClient getDFSClient(HttpServletRequest request)
- throws IOException {
- Configuration conf = new HdfsConfiguration(masterConf);
- UnixUserGroupInformation.saveToConf(conf,
- UnixUserGroupInformation.UGI_PROPERTY_NAME, getUGI(request));
- return new DFSClient(nameNodeAddr, conf);
+ throws IOException, InterruptedException {
+ final Configuration conf = new HdfsConfiguration(masterConf);
+
+ DFSClient client =
+ getUGI(request).doAs(new PrivilegedExceptionAction<DFSClient>() {
+ @Override
+ public DFSClient run() throws IOException {
+ return new DFSClient(nameNodeAddr, conf);
+ }
+ });
+
+ return client;
}
public void doGet(HttpServletRequest request, HttpServletResponse response)
@@ -72,7 +79,14 @@
if (reqRanges != null && !reqRanges.hasMoreElements())
reqRanges = null;
- DFSClient dfs = getDFSClient(request);
+ DFSClient dfs;
+ try {
+ dfs = getDFSClient(request);
+ } catch (InterruptedException e) {
+ response.sendError(400, e.getMessage());
+ return;
+ }
+
long fileLen = dfs.getFileInfo(filename).getLen();
FSInputStream in = dfs.open(filename);
OutputStream os = response.getOutputStream();
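
getDFSClient now declares InterruptedException, and doGet maps it to an HTTP 400 like the other servlets in this patch. One defensive refinement worth noting, which is not part of this commit: re-assert the thread's interrupt flag before answering, so the container still observes the interruption. A sketch of the committed block with that one line added:

    DFSClient dfs;
    try {
      dfs = getDFSClient(request);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();  // not in this patch: keep status
      response.sendError(400, e.getMessage());
      return;
    }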
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java Wed Jan 27 08:20:58 2010
@@ -24,8 +24,6 @@
import java.util.List;
import java.util.TreeSet;
-import javax.security.auth.login.LoginException;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
@@ -45,7 +43,7 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.RefreshUserToGroupMappingsProtocol;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
@@ -716,16 +714,9 @@
return 0;
}
- private static UnixUserGroupInformation getUGI(Configuration conf)
+ private static UserGroupInformation getUGI()
throws IOException {
- UnixUserGroupInformation ugi = null;
- try {
- ugi = UnixUserGroupInformation.login(conf, true);
- } catch (LoginException e) {
- throw (IOException)(new IOException(
- "Failed to get the current user's information.").initCause(e));
- }
- return ugi;
+ return UserGroupInformation.getCurrentUser();
}
/**
@@ -742,7 +733,7 @@
(RefreshAuthorizationPolicyProtocol)
RPC.getProxy(RefreshAuthorizationPolicyProtocol.class,
RefreshAuthorizationPolicyProtocol.versionID,
- NameNode.getAddress(conf), getUGI(conf), conf,
+ NameNode.getAddress(conf), getUGI(), conf,
NetUtils.getSocketFactory(conf,
RefreshAuthorizationPolicyProtocol.class));
@@ -766,7 +757,7 @@
(RefreshUserToGroupMappingsProtocol)
RPC.getProxy(RefreshUserToGroupMappingsProtocol.class,
RefreshUserToGroupMappingsProtocol.versionID,
- NameNode.getAddress(conf), getUGI(conf), conf,
+ NameNode.getAddress(conf), getUGI(), conf,
NetUtils.getSocketFactory(conf,
RefreshUserToGroupMappingsProtocol.class));
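
In DFSAdmin the old login dance -- UnixUserGroupInformation.login plus the LoginException-to-IOException translation -- collapses to a single call. A minimal sketch of the replacement idiom (class name illustrative):

    import java.io.IOException;
    import org.apache.hadoop.security.UserGroupInformation;

    public class WhoAmI {
      public static void main(String[] args) throws IOException {
        // getCurrentUser() replaces UnixUserGroupInformation.login(conf, true)
        // and throws IOException rather than LoginException.
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        System.out.println(ugi.getUserName());
      }
    }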
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java Wed Jan 27 08:20:58 2010
@@ -25,14 +25,11 @@
import java.net.URLConnection;
import java.net.URLEncoder;
-import javax.security.auth.login.LoginException;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdfs.server.namenode.NamenodeFsck;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@@ -72,11 +69,10 @@
/**
* Filesystem checker.
* @param conf current Configuration
- * @throws LoginException if login failed
*/
- public DFSck(Configuration conf) throws LoginException {
+ public DFSck(Configuration conf) throws IOException {
super(conf);
- this.ugi = UnixUserGroupInformation.login(conf, true);
+ this.ugi = UserGroupInformation.getCurrentUser();
}
/**
@@ -110,7 +106,7 @@
final StringBuffer url = new StringBuffer("http://");
url.append(getConf().get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT));
- url.append("/fsck?ugi=").append(ugi).append("&path=");
+ url.append("/fsck?ugi=").append(ugi.getUserName()).append("&path=");
String dir = "/";
// find top-level dir first
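
After this change the fsck URL carries only the user name, which is what getUGI(request) reconstructs on the server side. Illustrative only -- host and user below are hypothetical, not values from this commit:

    import java.net.URLEncoder;

    public class FsckUrlSketch {
      public static void main(String[] args) throws Exception {
        String host = "namenode.example.com:50070";  // hypothetical address
        String user = "alice";                       // hypothetical user
        // Shape of the request URL built by DFSck after this patch.
        String url = "http://" + host + "/fsck?ugi=" + user
            + "&path=" + URLEncoder.encode("/", "UTF-8");
        System.out.println(url);
      }
    }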
Modified: hadoop/hdfs/trunk/src/test/hdfs-site.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs-site.xml?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs-site.xml (original)
+++ hadoop/hdfs/trunk/src/test/hdfs-site.xml Wed Jan 27 08:20:58 2010
@@ -4,6 +4,10 @@
<!-- Put site-specific property overrides in this file. -->
<configuration>
-
+ <!-- Turn security off for tests by default -->
+ <property>
+ <name>hadoop.security.authentication</name>
+ <value>simple</value>
+ </property>
</configuration>
Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml Wed Jan 27 08:20:58 2010
@@ -16329,17 +16329,17 @@
<expected-output></expected-output>
</comparator>
</comparators>
- </test>
+ </test><!--
- <test> <!--Tested -->
+ <test> Tested
<description>refreshServiceAcl: verifying error message while refreshing security authorization policy for namenode</description>
<test-commands>
- <!-- hadoop-policy.xml for tests has
- security.refresh.policy.protocol.acl = ${user.name} -->
+ hadoop-policy.xml for tests has
+ security.refresh.policy.protocol.acl = ${user.name}
<dfs-admin-command>-fs NAMENODE -Dhadoop.job.ugi=blah,blah -refreshServiceAcl </dfs-admin-command>
</test-commands>
<cleanup-commands>
- <!-- No cleanup -->
+ No cleanup
</cleanup-commands>
<comparators>
<comparator>
@@ -16349,7 +16349,7 @@
</comparators>
</test>
- <!-- Test for safemode -->
+ --><!-- Test for safemode -->
<test> <!-- TESTED -->
<description>safemode: Test for enter - Namenode is not in safemode</description>
<test-commands>
Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsCreateMkdir.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsCreateMkdir.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsCreateMkdir.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsCreateMkdir.java Wed Jan 27 08:20:58 2010
@@ -26,7 +26,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -45,7 +45,7 @@
cluster = new MiniDFSCluster(conf, 2, true, null);
fc = FileContext.getFileContext(cluster.getURI(), conf);
defaultWorkingDirectory = fc.makeQualified( new Path("/user/" +
- UnixUserGroupInformation.login().getUserName()));
+ UserGroupInformation.getCurrentUser().getUserName()));
fc.mkdir(defaultWorkingDirectory, FileContext.DEFAULT_PERM, true);
}
Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsPermission.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsPermission.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsPermission.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestFcHdfsPermission.java Wed Jan 27 08:20:58 2010
@@ -27,7 +27,7 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -45,7 +45,7 @@
cluster = new MiniDFSCluster(conf, 2, true, null);
fc = FileContext.getFileContext(cluster.getURI(), conf);
defaultWorkingDirectory = fc.makeQualified( new Path("/user/" +
- UnixUserGroupInformation.login().getUserName()));
+ UserGroupInformation.getCurrentUser().getUserName()));
fc.mkdir(defaultWorkingDirectory, FileContext.DEFAULT_PERM, true);
}
Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java?rev=903562&r1=903561&r2=903562&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java Wed Jan 27 08:20:58 2010
@@ -29,7 +29,7 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.hdfs.protocol.FSConstants;
-import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
@@ -52,7 +52,7 @@
cluster.waitClusterUp();
fc = FileContext.getFileContext(cluster.getURI(), CONF);
defaultWorkingDirectory = fc.makeQualified( new Path("/user/" +
- UnixUserGroupInformation.login().getUserName()));
+ UserGroupInformation.getCurrentUser().getUserName()));
fc.mkdir(defaultWorkingDirectory, FileContext.DEFAULT_PERM, true);
}
@@ -65,7 +65,7 @@
cluster.waitClusterUp();
fc = FileContext.getFileContext(cluster.getURI(), CONF);
defaultWorkingDirectory = fc.makeQualified( new Path("/user/" +
- UnixUserGroupInformation.login().getUserName()));
+ UserGroupInformation.getCurrentUser().getUserName()));
fc.mkdir(defaultWorkingDirectory, FileContext.DEFAULT_PERM, true);
}