Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2011/09/16 17:04:27 UTC
svn commit: r1171613 - in /hadoop/common/branches/branch-0.20-security: ./
src/hdfs/ src/hdfs/org/apache/hadoop/hdfs/security/token/delegation/
src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/
src/hdfs/org/apache/hadoop/hdfs/server/nameno...
Author: szetszwo
Date: Fri Sep 16 15:04:26 2011
New Revision: 1171613
URL: http://svn.apache.org/viewvc?rev=1171613&view=rev
Log:
HDFS-2318. Provide authentication to webhdfs using SPNEGO and delegation tokens.
Added:
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/KerberosUgiAuthenticator.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
Modified:
hadoop/common/branches/branch-0.20-security/CHANGES.txt
hadoop/common/branches/branch-0.20-security/src/hdfs/hdfs-default.xml
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
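In outline: the NameNode HTTP endpoint now authenticates webhdfs callers with SPNEGO (or with a delegation token passed in the query string), and read/write operations are redirected to a DataNode with the token appended, so the DataNode can run the operation as the authenticated user. A minimal sketch of the resulting HTTP interaction, assuming a NameNode at namenode:50070, a previously obtained token string, and "delegation" as the value of JspHelper.DELEGATION_PARAMETER_NAME (see DelegationParam below):

  import java.io.InputStream;
  import java.net.HttpURLConnection;
  import java.net.URL;

  public class WebHdfsOpenSketch {
    public static void main(String[] args) throws Exception {
      final String token = "<encoded-delegation-token>"; // hypothetical value
      final URL url = new URL("http://namenode:50070/webhdfs/path/to/file"
          + "?op=OPEN&delegation=" + token);
      // The NameNode answers with a temporary redirect to a DataNode;
      // HttpURLConnection follows the redirect automatically.
      final HttpURLConnection conn = (HttpURLConnection)url.openConnection();
      final InputStream in = conn.getInputStream();
      try {
        final byte[] buf = new byte[4096];
        for (int n; (n = in.read(buf)) != -1; ) {
          System.out.write(buf, 0, n);
        }
      } finally {
        in.close();
        conn.disconnect();
      }
    }
  }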
Modified: hadoop/common/branches/branch-0.20-security/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/CHANGES.txt?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security/CHANGES.txt Fri Sep 16 15:04:26 2011
@@ -49,6 +49,9 @@ Release 0.20.205.0 - unreleased
HDFS-2338. Add configuration option to enable/disable webhdfs.
(jitendra via szetszwo)
+ HDFS-2318. Provide authentication to webhdfs using SPNEGO and delegation
+ tokens. (szetszwo)
+
BUG FIXES
MAPREDUCE-2324. Removed usage of broken
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/hdfs-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/hdfs-default.xml?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/hdfs-default.xml (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/hdfs-default.xml Fri Sep 16 15:04:26 2011
@@ -426,4 +426,24 @@ creations/deletions), or "all".</descrip
</description>
</property>
+<property>
+ <name>dfs.web.authentication.kerberos.principal</name>
+ <value>HTTP/${dfs.web.hostname}@${kerberos.realm}</value>
+ <description>
+ The HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
+
+ The HTTP Kerberos principal MUST start with 'HTTP/' per Kerberos
+ HTTP SPNEGO specification.
+ </description>
+</property>
+
+<property>
+ <name>dfs.web.authentication.kerberos.keytab</name>
+ <value>${user.home}/dfs.web.keytab</value>
+ <description>
+ The Kerberos keytab file with the credentials for the
+ HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
+ </description>
+</property>
+
</configuration>
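The two new properties give the Hadoop-Auth filter (AuthFilter, added below) the SPNEGO identity of the HTTP endpoint. A minimal sketch of setting them programmatically, as the tests below do for the webhdfs switch; the principal, realm and keytab path are placeholders:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hdfs.DFSConfigKeys;

  public class WebHdfsAuthConfSketch {
    /** Build a conf with webhdfs and its SPNEGO identity enabled. */
    public static Configuration create() {
      final Configuration conf = new Configuration();
      conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
      // The principal MUST start with "HTTP/"; hostname and realm are
      // placeholders here.
      conf.set("dfs.web.authentication.kerberos.principal",
          "HTTP/namenode.example.com@EXAMPLE.COM");
      conf.set("dfs.web.authentication.kerberos.keytab",
          "/etc/security/keytab/dfs.web.keytab");
      return conf;
    }
  }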
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/security/token/delegation/DelegationTokenSecretManager.java Fri Sep 16 15:04:26 2011
@@ -19,15 +19,23 @@
package org.apache.hadoop.hdfs.security.token.delegation;
//import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.net.InetSocketAddress;
import java.util.Iterator;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.security.token.delegation.DelegationKey;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
+import org.apache.hadoop.security.token.delegation.DelegationKey;
/**
@@ -278,4 +286,19 @@ public class DelegationTokenSecretManage
throws IOException {
namesystem.logUpdateMasterKey(key);
}
+
+ /** A utility method for creating credentials. */
+ public static Credentials createCredentials(final NameNode namenode,
+ final UserGroupInformation ugi, final String renewer) throws IOException {
+ final Token<DelegationTokenIdentifier> token = namenode.getDelegationToken(
+ new Text(renewer));
+ if (token == null) {
+ throw new IOException("Failed to get the token for " + renewer
+ + ", user=" + ugi.getShortUserName());
+ }
+ SecurityUtil.setTokenService(token, namenode.getNameNodeAddress());
+ final Credentials c = new Credentials();
+ c.addToken(new Text(ugi.getShortUserName()), token);
+ return c;
+ }
}
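The new createCredentials(..) utility centralizes the fetch-token, set-service and wrap-in-Credentials steps that GetDelegationTokenServlet used to do inline (see below) and that NamenodeWebHdfsMethods now needs as well. A minimal sketch of a caller, assuming a NameNode reference and a renewer name are at hand:

  import java.io.IOException;

  import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
  import org.apache.hadoop.hdfs.server.namenode.NameNode;
  import org.apache.hadoop.security.Credentials;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.token.Token;

  public class CreateCredentialsSketch {
    /** Fetch a delegation token for the current user and print its service. */
    static Credentials fetch(final NameNode nn, final String renewer)
        throws IOException {
      final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      final Credentials c =
          DelegationTokenSecretManager.createCredentials(nn, ugi, renewer);
      for (Token<?> t : c.getAllTokens()) {
        System.out.println("kind=" + t.getKind() + ", service=" + t.getService());
      }
      return c;
    }
  }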
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Fri Sep 16 15:04:26 2011
@@ -23,6 +23,7 @@ import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
import javax.servlet.ServletContext;
import javax.ws.rs.Consumes;
@@ -61,6 +62,7 @@ import org.apache.hadoop.hdfs.web.resour
import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.security.UserGroupInformation;
/** Web-hdfs DataNode implementation. */
@Path("")
@@ -76,6 +78,7 @@ public class DatanodeWebHdfsMethods {
@Produces({MediaType.APPLICATION_JSON})
public Response put(
final InputStream in,
+ @Context final UserGroupInformation ugi,
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
final PutOpParam op,
@@ -89,13 +92,18 @@ public class DatanodeWebHdfsMethods {
final ReplicationParam replication,
@QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
final BlockSizeParam blockSize
- ) throws IOException, URISyntaxException {
+ ) throws IOException, URISyntaxException, InterruptedException {
+
if (LOG.isTraceEnabled()) {
- LOG.trace(op + ": " + path
- + Param.toSortedString(", ", permission, overwrite, bufferSize,
- replication, blockSize));
+ LOG.trace(op + ": " + path + ", ugi=" + ugi
+ + Param.toSortedString(", ", permission, overwrite, bufferSize,
+ replication, blockSize));
}
+ return ugi.doAs(new PrivilegedExceptionAction<Response>() {
+ @Override
+ public Response run() throws IOException, URISyntaxException {
+
final String fullpath = path.getAbsolutePath();
final DataNode datanode = (DataNode)context.getAttribute("datanode");
@@ -104,12 +112,12 @@ public class DatanodeWebHdfsMethods {
{
final Configuration conf = new Configuration(datanode.getConf());
final DFSClient dfsclient = new DFSClient(conf);
+ final int b = bufferSize.getValue(conf);
final FSDataOutputStream out = new FSDataOutputStream(dfsclient.create(
fullpath, permission.getFsPermission(), overwrite.getValue(),
- replication.getValue(), blockSize.getValue(), null,
- bufferSize.getValue()), null);
+ replication.getValue(), blockSize.getValue(conf), null, b), null);
try {
- IOUtils.copyBytes(in, out, bufferSize.getValue());
+ IOUtils.copyBytes(in, out, b);
} finally {
out.close();
}
@@ -120,6 +128,8 @@ public class DatanodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+ }
+ });
}
/** Handle HTTP POST request. */
@@ -129,17 +139,23 @@ public class DatanodeWebHdfsMethods {
@Produces({MediaType.APPLICATION_JSON})
public Response post(
final InputStream in,
+ @Context final UserGroupInformation ugi,
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
final PostOpParam op,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
- ) throws IOException, URISyntaxException {
+ ) throws IOException, URISyntaxException, InterruptedException {
+
if (LOG.isTraceEnabled()) {
- LOG.trace(op + ": " + path
- + Param.toSortedString(", ", bufferSize));
+ LOG.trace(op + ": " + path + ", ugi=" + ugi
+ + Param.toSortedString(", ", bufferSize));
}
+ return ugi.doAs(new PrivilegedExceptionAction<Response>() {
+ @Override
+ public Response run() throws IOException {
+
final String fullpath = path.getAbsolutePath();
final DataNode datanode = (DataNode)context.getAttribute("datanode");
@@ -148,10 +164,10 @@ public class DatanodeWebHdfsMethods {
{
final Configuration conf = new Configuration(datanode.getConf());
final DFSClient dfsclient = new DFSClient(conf);
- final FSDataOutputStream out = dfsclient.append(fullpath,
- bufferSize.getValue(), null, null);
+ final int b = bufferSize.getValue(conf);
+ final FSDataOutputStream out = dfsclient.append(fullpath, b, null, null);
try {
- IOUtils.copyBytes(in, out, bufferSize.getValue());
+ IOUtils.copyBytes(in, out, b);
} finally {
out.close();
}
@@ -160,6 +176,8 @@ public class DatanodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+ }
+ });
}
/** Handle HTTP GET request. */
@@ -167,6 +185,7 @@ public class DatanodeWebHdfsMethods {
@Path("{" + UriFsPathParam.NAME + ":.*}")
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response get(
+ @Context final UserGroupInformation ugi,
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
final GetOpParam op,
@@ -176,13 +195,17 @@ public class DatanodeWebHdfsMethods {
final LengthParam length,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
- ) throws IOException, URISyntaxException {
+ ) throws IOException, URISyntaxException, InterruptedException {
if (LOG.isTraceEnabled()) {
- LOG.trace(op + ": " + path
+ LOG.trace(op + ": " + path + ", ugi=" + ugi
+ Param.toSortedString(", ", offset, length, bufferSize));
}
+ return ugi.doAs(new PrivilegedExceptionAction<Response>() {
+ @Override
+ public Response run() throws IOException {
+
final String fullpath = path.getAbsolutePath();
final DataNode datanode = (DataNode)context.getAttribute("datanode");
@@ -192,8 +215,9 @@ public class DatanodeWebHdfsMethods {
final Configuration conf = new Configuration(datanode.getConf());
final InetSocketAddress nnRpcAddr = NameNode.getAddress(conf);
final DFSClient dfsclient = new DFSClient(nnRpcAddr, conf);
+ final int b = bufferSize.getValue(conf);
final DFSDataInputStream in = new DFSClient.DFSDataInputStream(
- dfsclient.open(fullpath, bufferSize.getValue(), true, null));
+ dfsclient.open(fullpath, b, true, null));
in.seek(offset.getValue());
final StreamingOutput streaming = new StreamingOutput() {
@@ -201,9 +225,9 @@ public class DatanodeWebHdfsMethods {
public void write(final OutputStream out) throws IOException {
final Long n = length.getValue();
if (n == null) {
- IOUtils.copyBytes(in, out, bufferSize.getValue());
+ IOUtils.copyBytes(in, out, b);
} else {
- IOUtils.copyBytes(in, out, n, bufferSize.getValue(), false);
+ IOUtils.copyBytes(in, out, n, b, false);
}
}
};
@@ -212,5 +236,7 @@ public class DatanodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+ }
+ });
}
}
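Each handler above now takes the remote caller as an injected UserGroupInformation and wraps its body in ugi.doAs(..), so the DFSClient operations inside execute with the caller's identity rather than the datanode's login user (hence the added InterruptedException). The pattern in isolation, as a sketch:

  import java.io.IOException;
  import java.security.PrivilegedExceptionAction;

  import org.apache.hadoop.security.UserGroupInformation;

  public class DoAsSketch {
    /** Run an action with the privileges of the given remote user. */
    static String runAs(final UserGroupInformation ugi)
        throws IOException, InterruptedException {
      return ugi.doAs(new PrivilegedExceptionAction<String>() {
        @Override
        public String run() throws IOException {
          // Code here (e.g. DFSClient calls) is attributed to ugi,
          // not to the daemon's login user.
          return UserGroupInformation.getCurrentUser().getUserName();
        }
      });
    }
  }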
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java Fri Sep 16 15:04:26 2011
@@ -29,6 +29,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
@@ -72,13 +73,9 @@ public class GetDelegationTokenServlet e
final DataOutputStream dosFinal = dos; // for doAs block
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
- public Void run() throws Exception {
-
- Token<DelegationTokenIdentifier> token =
- nn.getDelegationToken(new Text(renewerFinal));
- SecurityUtil.setTokenService(token, NameNode.getAddress(conf));
- Credentials ts = new Credentials();
- ts.addToken(new Text(ugi.getShortUserName()), token);
+ public Void run() throws IOException {
+ final Credentials ts = DelegationTokenSecretManager.createCredentials(
+ nn, ugi, renewerFinal);
ts.write(dosFinal);
dosFinal.close();
return null;
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java Fri Sep 16 15:04:26 2011
@@ -48,6 +48,7 @@ import org.apache.hadoop.hdfs.security.t
import org.apache.hadoop.hdfs.server.common.HdfsConstants;
import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdfs.web.resources.UserParam;
import org.apache.hadoop.http.HtmlQuoting;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AccessControlException;
@@ -424,19 +425,31 @@ public class JspHelper {
return UserGroupInformation.createRemoteUser(strings[0]);
}
+ /** Same as getUGI(request, conf, KERBEROS_SSL, true). */
+ public static UserGroupInformation getUGI(final HttpServletRequest request,
+ final Configuration conf) throws IOException {
+ return getUGI(request, conf, AuthenticationMethod.KERBEROS_SSL, true);
+ }
+
/**
* Get {@link UserGroupInformation} and possibly the delegation token out of
* the request.
* @param request the http request
+ * @param conf configuration
+ * @param secureAuthMethod the AuthenticationMethod used in secure mode.
+ * @param tryUgiParameter Should it try the ugi parameter?
* @return a new user from the request
* @throws AccessControlException if the request has no token
*/
public static UserGroupInformation getUGI(HttpServletRequest request,
- Configuration conf
- ) throws IOException {
- UserGroupInformation ugi = null;
+ Configuration conf,
+ final AuthenticationMethod secureAuthMethod,
+ final boolean tryUgiParameter) throws IOException {
+ final UserGroupInformation ugi;
+ final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter);
+
if(UserGroupInformation.isSecurityEnabled()) {
- String user = request.getRemoteUser();
+ final String user = request.getRemoteUser();
String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
if (tokenString != null) {
Token<DelegationTokenIdentifier> token =
@@ -450,6 +463,7 @@ public class JspHelper {
DelegationTokenIdentifier id = new DelegationTokenIdentifier();
id.readFields(in);
ugi = id.getUser();
+ checkUsername(ugi.getUserName(), user);
ugi.addToken(token);
ugi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
} else {
@@ -460,16 +474,15 @@ public class JspHelper {
ugi = UserGroupInformation.createRemoteUser(user);
// This is not necessarily true, could have been auth'ed by user-facing
// filter
- ugi.setAuthenticationMethod(AuthenticationMethod.KERBEROS_SSL);
+ ugi.setAuthenticationMethod(secureAuthMethod);
}
+
+ checkUsername(user, usernameFromQuery);
+
} else { // Security's not on, pull from url
- String user = request.getParameter("ugi");
-
- if(user == null) { // not specified in request
- ugi = getDefaultWebUser(conf);
- } else {
- ugi = UserGroupInformation.createRemoteUser(user.split(",")[0]);
- }
+ ugi = usernameFromQuery == null?
+ getDefaultWebUser(conf) // not specified in request
+ : UserGroupInformation.createRemoteUser(usernameFromQuery);
ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
}
@@ -478,6 +491,27 @@ public class JspHelper {
return ugi;
}
+ private static void checkUsername(final String expected, final String name
+ ) throws IOException {
+ if (name != null && !name.equals(expected)) {
+ throw new IOException("Usernames not matched: name=" + name
+ + " != expected=" + expected);
+ }
+ }
+
+ private static String getUsernameFromQuery(final HttpServletRequest request,
+ final boolean tryUgiParameter) {
+ String username = request.getParameter(UserParam.NAME);
+ if (username == null && tryUgiParameter) {
+ //try ugi parameter
+ final String ugiStr = request.getParameter("ugi");
+ if (ugiStr != null) {
+ username = ugiStr.split(",")[0];
+ }
+ }
+ return username;
+ }
+
public static DFSClient getDFSClient(final UserGroupInformation user,
final InetSocketAddress addr,
final Configuration conf
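The reworked getUGI(..) resolves the caller from a delegation token in secure mode, or from the user/ugi query parameter otherwise, and cross-checks the query-string username against the authenticated one. A sketch of a caller, mirroring how UserProvider (below) invokes it:

  import java.io.IOException;

  import javax.servlet.http.HttpServletRequest;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hdfs.server.namenode.JspHelper;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

  public class GetUgiSketch {
    /** Resolve the remote caller: KERBEROS in secure mode, no ugi= fallback. */
    static UserGroupInformation caller(final HttpServletRequest request,
        final Configuration conf) throws IOException {
      return JspHelper.getUGI(request, conf, AuthenticationMethod.KERBEROS, false);
    }
  }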
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Sep 16 15:04:26 2011
@@ -65,6 +65,7 @@ import org.apache.hadoop.hdfs.server.pro
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
+import org.apache.hadoop.hdfs.web.AuthFilter;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.hdfs.web.resources.Param;
import org.apache.hadoop.http.HttpServer;
@@ -354,7 +355,25 @@ public class NameNode implements ClientP
int infoPort = infoSocAddr.getPort();
httpServer = new HttpServer("hdfs", infoHost, infoPort,
infoPort == 0, conf,
- SecurityUtil.getAdminAcls(conf, DFSConfigKeys.DFS_ADMIN));
+ SecurityUtil.getAdminAcls(conf, DFSConfigKeys.DFS_ADMIN)) {
+ {
+ if (conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
+ DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT)) {
+ //add SPNEGO authentication filter for webhdfs
+ final String name = "SPNEGO";
+ final String classname = AuthFilter.class.getName();
+ final String pathSpec = "/" + WebHdfsFileSystem.PATH_PREFIX + "/*";
+ defineFilter(webAppContext, name, classname, null,
+ new String[]{pathSpec});
+ LOG.info("Added filter '" + name + "' (class=" + classname + ")");
+
+ // add webhdfs packages
+ addJerseyResourcePackage(
+ NamenodeWebHdfsMethods.class.getPackage().getName()
+ + ";" + Param.class.getPackage().getName(), pathSpec);
+ }
+ }
+ };
boolean certSSL = conf.getBoolean("dfs.https.enable", false);
boolean useKrb = UserGroupInformation.isSecurityEnabled();
@@ -399,15 +418,6 @@ public class NameNode implements ClientP
FileChecksumServlets.RedirectServlet.class, false);
httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
ContentSummaryServlet.class, false);
-
- if (conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
- DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT)) {
- httpServer.addJerseyResourcePackage(NamenodeWebHdfsMethods.class
- .getPackage().getName()
- + ";"
- + Param.class.getPackage().getName(), "/"
- + WebHdfsFileSystem.PATH_PREFIX + "/*");
- }
httpServer.start();
// The web-server port can be ephemeral... ensure we have the correct info
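The filter and the Jersey resource packages are registered in an instance initializer of an anonymous HttpServer subclass, so they are wired up during construction, before httpServer.start() can run. The double-brace idiom in isolation, as a toy sketch with a hypothetical Server class:

  public class InitIdiomSketch {
    static class Server {
      void defineFilter(final String name) {
        System.out.println("filter " + name + " registered");
      }
    }

    public static void main(String[] args) {
      // Anonymous subclass + instance initializer: the block between the
      // inner braces runs as part of the constructor.
      final Server s = new Server() {
        {
          defineFilter("SPNEGO");
        }
      };
    }
  }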
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Fri Sep 16 15:04:26 2011
@@ -24,8 +24,10 @@ import java.io.OutputStream;
import java.io.PrintStream;
import java.net.URI;
import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
@@ -47,6 +49,7 @@ import org.apache.hadoop.hdfs.protocol.D
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.hdfs.server.namenode.JspHelper;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.web.JsonUtil;
@@ -54,6 +57,7 @@ import org.apache.hadoop.hdfs.web.WebHdf
import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.DelegationParam;
import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
import org.apache.hadoop.hdfs.web.resources.DstPathParam;
import org.apache.hadoop.hdfs.web.resources.GetOpParam;
@@ -71,6 +75,13 @@ import org.apache.hadoop.hdfs.web.resour
import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
+import org.apache.hadoop.hdfs.web.resources.UserParam;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
/** Web-hdfs NameNode implementation. */
@Path("")
@@ -78,6 +89,7 @@ public class NamenodeWebHdfsMethods {
private static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
private @Context ServletContext context;
+ private @Context HttpServletRequest request;
private static DatanodeInfo chooseDatanode(final NameNode namenode,
final String path, final HttpOpParam.Op op, final long openOffset
@@ -104,11 +116,40 @@ public class NamenodeWebHdfsMethods {
return namenode.getNamesystem().getRandomDatanode();
}
- private static URI redirectURI(final NameNode namenode,
+ private Token<? extends TokenIdentifier> generateDelegationToken(
+ final NameNode namenode, final UserGroupInformation ugi,
+ final String renewer) throws IOException {
+ final Credentials c = DelegationTokenSecretManager.createCredentials(
+ namenode, ugi, request.getUserPrincipal().getName());
+ final Token<? extends TokenIdentifier> t = c.getAllTokens().iterator().next();
+ t.setService(new Text(SecurityUtil.buildDTServiceName(
+ NameNode.getUri(namenode.getNameNodeAddress()),
+ NameNode.DEFAULT_PORT)));
+ return t;
+ }
+
+ private URI redirectURI(final NameNode namenode,
+ final UserGroupInformation ugi, final DelegationParam delegation,
final String path, final HttpOpParam.Op op, final long openOffset,
final Param<?, ?>... parameters) throws URISyntaxException, IOException {
final DatanodeInfo dn = chooseDatanode(namenode, path, op, openOffset);
- final String query = op.toQueryString() + Param.toSortedString("&", parameters);
+
+ final String delegationQuery;
+ if (!UserGroupInformation.isSecurityEnabled()) {
+ //security disabled
+ delegationQuery = "";
+ } else if (delegation.getValue() != null) {
+ //client has provided a token
+ delegationQuery = "&" + delegation;
+ } else {
+ //generate a token
+ final Token<? extends TokenIdentifier> t = generateDelegationToken(
+ namenode, ugi, request.getUserPrincipal().getName());
+ delegationQuery = "&" + new DelegationParam(t.encodeToUrlString());
+ }
+ final String query = op.toQueryString()
+ + '&' + new UserParam(ugi) + delegationQuery
+ + Param.toSortedString("&", parameters);
final String uripath = "/" + WebHdfsFileSystem.PATH_PREFIX + path;
final URI uri = new URI("http", null, dn.getHostName(), dn.getInfoPort(),
@@ -126,6 +167,9 @@ public class NamenodeWebHdfsMethods {
@Produces({MediaType.APPLICATION_JSON})
public Response put(
final InputStream in,
+ @Context final UserGroupInformation ugi,
+ @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+ final DelegationParam delegation,
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
final PutOpParam op,
@@ -149,22 +193,27 @@ public class NamenodeWebHdfsMethods {
final ModificationTimeParam modificationTime,
@QueryParam(AccessTimeParam.NAME) @DefaultValue(AccessTimeParam.DEFAULT)
final AccessTimeParam accessTime
- ) throws IOException, URISyntaxException {
+ ) throws IOException, URISyntaxException, InterruptedException {
if (LOG.isTraceEnabled()) {
- LOG.trace(op + ": " + path
+ LOG.trace(op + ": " + path + ", ugi=" + ugi
+ Param.toSortedString(", ", dstPath, owner, group, permission,
overwrite, bufferSize, replication, blockSize,
modificationTime, accessTime));
}
+ return ugi.doAs(new PrivilegedExceptionAction<Response>() {
+ @Override
+ public Response run() throws IOException, URISyntaxException {
+
final String fullpath = path.getAbsolutePath();
final NameNode namenode = (NameNode)context.getAttribute("name.node");
switch(op.getValue()) {
case CREATE:
{
- final URI uri = redirectURI(namenode, fullpath, op.getValue(), -1L,
+ final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
+ op.getValue(), -1L,
permission, overwrite, bufferSize, replication, blockSize);
return Response.temporaryRedirect(uri).build();
}
@@ -204,6 +253,8 @@ public class NamenodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+ }
+ });
}
/** Handle HTTP POST request. */
@@ -213,31 +264,40 @@ public class NamenodeWebHdfsMethods {
@Produces({MediaType.APPLICATION_JSON})
public Response post(
final InputStream in,
+ @Context final UserGroupInformation ugi,
+ @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+ final DelegationParam delegation,
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
final PostOpParam op,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
- ) throws IOException, URISyntaxException {
+ ) throws IOException, URISyntaxException, InterruptedException {
if (LOG.isTraceEnabled()) {
- LOG.trace(op + ": " + path
- + Param.toSortedString(", ", bufferSize));
+ LOG.trace(op + ": " + path + ", ugi=" + ugi
+ + Param.toSortedString(", ", bufferSize));
}
+ return ugi.doAs(new PrivilegedExceptionAction<Response>() {
+ @Override
+ public Response run() throws IOException, URISyntaxException {
+
final String fullpath = path.getAbsolutePath();
final NameNode namenode = (NameNode)context.getAttribute("name.node");
switch(op.getValue()) {
case APPEND:
{
- final URI uri = redirectURI(namenode, fullpath, op.getValue(), -1L,
- bufferSize);
+ final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
+ op.getValue(), -1L, bufferSize);
return Response.temporaryRedirect(uri).build();
}
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+ }
+ });
}
private static final UriFsPathParam ROOT = new UriFsPathParam("");
@@ -247,6 +307,9 @@ public class NamenodeWebHdfsMethods {
@Path("/")
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response root(
+ @Context final UserGroupInformation ugi,
+ @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+ final DelegationParam delegation,
@QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
final GetOpParam op,
@QueryParam(OffsetParam.NAME) @DefaultValue(OffsetParam.DEFAULT)
@@ -255,8 +318,8 @@ public class NamenodeWebHdfsMethods {
final LengthParam length,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
- ) throws IOException, URISyntaxException {
- return get(ROOT, op, offset, length, bufferSize);
+ ) throws IOException, URISyntaxException, InterruptedException {
+ return get(ugi, delegation, ROOT, op, offset, length, bufferSize);
}
/** Handle HTTP GET request. */
@@ -264,6 +327,9 @@ public class NamenodeWebHdfsMethods {
@Path("{" + UriFsPathParam.NAME + ":.*}")
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
public Response get(
+ @Context final UserGroupInformation ugi,
+ @QueryParam(DelegationParam.NAME) @DefaultValue(DelegationParam.DEFAULT)
+ final DelegationParam delegation,
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
final GetOpParam op,
@@ -273,21 +339,26 @@ public class NamenodeWebHdfsMethods {
final LengthParam length,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
- ) throws IOException, URISyntaxException {
+ ) throws IOException, URISyntaxException, InterruptedException {
if (LOG.isTraceEnabled()) {
- LOG.trace(op + ", " + path
+ LOG.trace(op + ": " + path + ", ugi=" + ugi
+ Param.toSortedString(", ", offset, length, bufferSize));
}
+
+ return ugi.doAs(new PrivilegedExceptionAction<Response>() {
+ @Override
+ public Response run() throws IOException, URISyntaxException {
+
final NameNode namenode = (NameNode)context.getAttribute("name.node");
final String fullpath = path.getAbsolutePath();
switch(op.getValue()) {
case OPEN:
{
- final URI uri = redirectURI(namenode, fullpath, op.getValue(),
- offset.getValue(), offset, length, bufferSize);
+ final URI uri = redirectURI(namenode, ugi, delegation, fullpath,
+ op.getValue(), offset.getValue(), offset, length, bufferSize);
return Response.temporaryRedirect(uri).build();
}
case GETFILESTATUS:
@@ -304,6 +375,8 @@ public class NamenodeWebHdfsMethods {
default:
throw new UnsupportedOperationException(op + " is not supported");
}
+ }
+ });
}
private static DirectoryListing getDirectoryListing(final NameNode np,
@@ -353,28 +426,36 @@ public class NamenodeWebHdfsMethods {
@Path("{path:.*}")
@Produces(MediaType.APPLICATION_JSON)
public Response delete(
+ @Context final UserGroupInformation ugi,
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
final DeleteOpParam op,
@QueryParam(RecursiveParam.NAME) @DefaultValue(RecursiveParam.DEFAULT)
final RecursiveParam recursive
- ) throws IOException {
+ ) throws IOException, InterruptedException {
if (LOG.isTraceEnabled()) {
- LOG.trace(op + ", " + path
- + Param.toSortedString(", ", recursive));
+ LOG.trace(op + ": " + path + ", ugi=" + ugi
+ + Param.toSortedString(", ", recursive));
}
- switch(op.getValue()) {
- case DELETE:
- final NameNode namenode = (NameNode)context.getAttribute("name.node");
- final String fullpath = path.getAbsolutePath();
- final boolean b = namenode.delete(fullpath, recursive.getValue());
- final String js = JsonUtil.toJsonString(DeleteOpParam.Op.DELETE, b);
- return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
-
- default:
- throw new UnsupportedOperationException(op + " is not supported");
- }
+ return ugi.doAs(new PrivilegedExceptionAction<Response>() {
+ @Override
+ public Response run() throws IOException {
+ final NameNode namenode = (NameNode)context.getAttribute("name.node");
+ final String fullpath = path.getAbsolutePath();
+
+ switch(op.getValue()) {
+ case DELETE:
+ {
+ final boolean b = namenode.delete(fullpath, recursive.getValue());
+ final String js = JsonUtil.toJsonString(DeleteOpParam.Op.DELETE, b);
+ return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+ }
+ default:
+ throw new UnsupportedOperationException(op + " is not supported");
+ }
+ }
+ });
}
}
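redirectURI(..) now appends the caller's username and a delegation token to the redirect query: nothing when security is off, the client's own token when one was supplied, or a freshly generated token otherwise. The three-way choice in isolation, again assuming "delegation" as the parameter name defined by DelegationParam:

  import org.apache.hadoop.security.UserGroupInformation;

  public class DelegationQuerySketch {
    static String delegationQuery(final String clientToken,
        final String freshToken) {
      if (!UserGroupInformation.isSecurityEnabled()) {
        return "";                              // security disabled
      } else if (clientToken != null) {
        return "&delegation=" + clientToken;    // client has provided a token
      } else {
        return "&delegation=" + freshToken;     // newly generated token
      }
    }
  }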
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java?rev=1171613&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/AuthFilter.java Fri Sep 16 15:04:26 2011
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import java.util.Map;
+import java.util.Properties;
+
+import javax.servlet.FilterConfig;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+
+/**
+ * Subclass of {@link AuthenticationFilter} that
+ * obtains Hadoop-Auth configuration for webhdfs.
+ */
+public class AuthFilter extends AuthenticationFilter {
+ private static final String CONF_PREFIX = "dfs.web.authentication.";
+
+ /**
+ * Returns the filter configuration properties,
+ * including the ones prefixed with {@link #CONF_PREFIX}.
+ * The prefix is removed from the returned property names.
+ *
+ * @param prefix parameter not used.
+ * @param config parameter not used.
+ * @return Hadoop-Auth configuration properties.
+ */
+ @Override
+ protected Properties getConfiguration(String prefix, FilterConfig config) {
+ final Configuration conf = new Configuration();
+ final Properties p = new Properties();
+
+ //set authentication type
+ p.setProperty(AUTH_TYPE, UserGroupInformation.isSecurityEnabled()?
+ KerberosAuthenticationHandler.TYPE: PseudoAuthenticationHandler.TYPE);
+ //For Pseudo Authentication, allow anonymous.
+ p.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
+ //set cookie path
+ p.setProperty(COOKIE_PATH, "/");
+
+ //set other configurations with CONF_PREFIX
+ for (Map.Entry<String, String> entry : conf) {
+ final String key = entry.getKey();
+ if (key.startsWith(CONF_PREFIX)) {
+ //remove prefix from the key and set property
+ p.setProperty(key.substring(CONF_PREFIX.length()), conf.get(key));
+ }
+ }
+ return p;
+ }
+}
\ No newline at end of file
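The effect of getConfiguration(..) is that a cluster key such as dfs.web.authentication.kerberos.principal reaches Hadoop-Auth as kerberos.principal. The prefix-stripping step in isolation, as a sketch over plain Properties:

  import java.util.Properties;

  public class PrefixStripSketch {
    private static final String CONF_PREFIX = "dfs.web.authentication.";

    /** e.g. dfs.web.authentication.kerberos.principal -> kerberos.principal */
    static Properties strip(final Properties in) {
      final Properties out = new Properties();
      for (String key : in.stringPropertyNames()) {
        if (key.startsWith(CONF_PREFIX)) {
          out.setProperty(key.substring(CONF_PREFIX.length()),
              in.getProperty(key));
        }
      }
      return out;
    }
  }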
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/KerberosUgiAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/KerberosUgiAuthenticator.java?rev=1171613&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/KerberosUgiAuthenticator.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/KerberosUgiAuthenticator.java Fri Sep 16 15:04:26 2011
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import java.io.IOException;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
+import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
+
+/**
+ * Use the UserGroupInformation login user in a pseudo authenticator
+ * as the fallback if the server does not use Kerberos SPNEGO HTTP
+ * authentication.
+ */
+public class KerberosUgiAuthenticator extends KerberosAuthenticator {
+ @Override
+ protected Authenticator getFallBackAuthenticator() {
+ return new PseudoAuthenticator() {
+ @Override
+ protected String getUserName() {
+ try {
+ return UserGroupInformation.getLoginUser().getUserName();
+ } catch (IOException e) {
+ throw new SecurityException("Failed to obtain current username", e);
+ }
+ }
+ };
+ }
+}
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Fri Sep 16 15:04:26 2011
@@ -29,6 +29,7 @@ import java.net.URISyntaxException;
import java.net.URL;
import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -58,8 +59,12 @@ import org.apache.hadoop.hdfs.web.resour
import org.apache.hadoop.hdfs.web.resources.PutOpParam;
import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
+import org.apache.hadoop.hdfs.web.resources.UserParam;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.util.Progressable;
import org.mortbay.util.ajax.JSON;
@@ -70,9 +75,23 @@ public class WebHdfsFileSystem extends H
/** Http URI: http://namenode:port/{PATH_PREFIX}/path/to/file */
public static final String PATH_PREFIX = SCHEME;
+ private static final KerberosUgiAuthenticator AUTH = new KerberosUgiAuthenticator();
+
+ private UserGroupInformation ugi;
+ private final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
protected Path workingDir;
@Override
+ public synchronized void initialize(URI uri, Configuration conf
+ ) throws IOException {
+ super.initialize(uri, conf);
+ setConf(conf);
+
+ ugi = UserGroupInformation.getCurrentUser();
+ this.workingDir = getHomeDirectory();
+ }
+
+ @Override
public URI getUri() {
try {
return new URI(SCHEME, null, nnAddr.getHostName(), nnAddr.getPort(),
@@ -84,9 +103,6 @@ public class WebHdfsFileSystem extends H
@Override
public synchronized Path getWorkingDirectory() {
- if (workingDir == null) {
- workingDir = getHomeDirectory();
- }
return workingDir;
}
@@ -157,8 +173,9 @@ public class WebHdfsFileSystem extends H
final String path = "/" + PATH_PREFIX
+ makeQualified(fspath).toUri().getPath();
final String query = op.toQueryString()
+ + '&' + new UserParam(ugi)
+ Param.toSortedString("&", parameters);
- final URL url = getNamenodeURL(path, query);
+ final URL url = getNamenodeURL(path, updateQuery(query));
if (LOG.isTraceEnabled()) {
LOG.trace("url=" + url);
}
@@ -170,7 +187,12 @@ public class WebHdfsFileSystem extends H
final URL url = toUrl(op, fspath, parameters);
//connect and get response
- final HttpURLConnection conn = (HttpURLConnection)url.openConnection();
+ final HttpURLConnection conn;
+ try {
+ conn = new AuthenticatedURL(AUTH).openConnection(url, authToken);
+ } catch(AuthenticationException e) {
+ throw new IOException("Authentication failed, url=" + url, e);
+ }
try {
conn.setRequestMethod(op.getType().toString());
conn.setDoOutput(op.getDoOutput());
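With initialize(..) capturing the current user and openConnection(..) going through AuthenticatedURL, a client simply obtains the file system by URI; SPNEGO negotiation and the auth cookie are handled underneath. A usage sketch, assuming webhdfs:// is mapped to WebHdfsFileSystem in the configuration and that the hostname is a placeholder:

  import java.net.URI;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class WebHdfsClientSketch {
    public static void main(String[] args) throws Exception {
      final Configuration conf = new Configuration();
      final FileSystem fs = FileSystem.get(
          URI.create("webhdfs://namenode.example.com:50070/"), conf);
      System.out.println("home = " + fs.getHomeDirectory());
      for (FileStatus status : fs.listStatus(new Path("/"))) {
        System.out.println(status.getPath());
      }
    }
  }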
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java Fri Sep 16 15:04:26 2011
@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hdfs.web.resources;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
+
/** Block size parameter. */
public class BlockSizeParam extends LongParam {
/** Parameter name. */
@@ -46,4 +49,10 @@ public class BlockSizeParam extends Long
public String getName() {
return NAME;
}
+
+ /** @return the value, or the default from conf if the value is null. */
+ public long getValue(final Configuration conf) {
+ return getValue() != null? getValue()
+ : conf.getLong("dfs.block.size", FSConstants.DEFAULT_BLOCK_SIZE);
+ }
}
\ No newline at end of file
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java Fri Sep 16 15:04:26 2011
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hdfs.web.resources;
+import org.apache.hadoop.conf.Configuration;
+
/** Buffer size parameter. */
public class BufferSizeParam extends IntegerParam {
/** Parameter name. */
@@ -46,4 +48,10 @@ public class BufferSizeParam extends Int
public String getName() {
return NAME;
}
+
+ /** @return the value, or the default from conf if the value is null. */
+ public int getValue(final Configuration conf) {
+ return getValue() != null? getValue()
+ : conf.getInt("io.file.buffer.size", 4096);
+ }
}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DelegationParam.java?rev=1171613&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DelegationParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DelegationParam.java Fri Sep 16 15:04:26 2011
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+import org.apache.hadoop.hdfs.server.namenode.JspHelper;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/** Delegation token parameter. */
+public class DelegationParam extends StringParam {
+ /** Parameter name. */
+ public static final String NAME = JspHelper.DELEGATION_PARAMETER_NAME;
+ /** Default parameter value. */
+ public static final String DEFAULT = "";
+
+ private static final Domain DOMAIN = new Domain(NAME, null);
+
+ /**
+ * Constructor.
+ * @param str a string representation of the parameter value.
+ */
+ public DelegationParam(final String str) {
+ super(DOMAIN, UserGroupInformation.isSecurityEnabled()
+ && str != null && !str.equals(DEFAULT)? str: null);
+ }
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+}
\ No newline at end of file
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java Fri Sep 16 15:04:26 2011
@@ -18,7 +18,8 @@
package org.apache.hadoop.hdfs.web.resources;
/** Http operation parameter. */
-public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op> extends EnumParam<E> {
+public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op>
+ extends EnumParam<E> {
/** Default parameter value. */
public static final String DEFAULT = NULL;
@@ -32,20 +33,16 @@ public abstract class HttpOpParam<E exte
/** @return the Http operation type. */
public Type getType();
- /** @return true if the operation has output. */
+ /** @return true if the operation will do output. */
public boolean getDoOutput();
- /** @return true if the operation has output. */
+ /** @return the expected http response code. */
public int getExpectedHttpResponseCode();
/** @return a URI query string. */
public String toQueryString();
}
- /**
- * Constructor.
- * @param str a string representation of the parameter value.
- */
HttpOpParam(final Domain<E> domain, final E value) {
super(domain, value);
}
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserParam.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserParam.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserParam.java Fri Sep 16 15:04:26 2011
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hdfs.web.resources;
+import org.apache.hadoop.security.UserGroupInformation;
+
/** User parameter. */
public class UserParam extends StringParam {
/** Parameter name. */
@@ -34,6 +36,13 @@ public class UserParam extends StringPar
super(DOMAIN, str == null || str.equals(DEFAULT)? null: str);
}
+ /**
+ * Construct an object from a UGI.
+ */
+ public UserParam(final UserGroupInformation ugi) {
+ this(ugi.getShortUserName());
+ }
+
@Override
public String getName() {
return NAME;
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java Fri Sep 16 15:04:26 2011
@@ -17,12 +17,18 @@
*/
package org.apache.hadoop.hdfs.web.resources;
+import java.io.IOException;
import java.lang.reflect.Type;
-import java.security.Principal;
+import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.Context;
import javax.ws.rs.ext.Provider;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.namenode.JspHelper;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+
import com.sun.jersey.api.core.HttpContext;
import com.sun.jersey.core.spi.component.ComponentContext;
import com.sun.jersey.core.spi.component.ComponentScope;
@@ -30,33 +36,23 @@ import com.sun.jersey.server.impl.inject
import com.sun.jersey.spi.inject.Injectable;
import com.sun.jersey.spi.inject.InjectableProvider;
+/** Inject user information into http operations. */
@Provider
-public class UserProvider extends AbstractHttpContextInjectable<Principal>
+public class UserProvider
+ extends AbstractHttpContextInjectable<UserGroupInformation>
implements InjectableProvider<Context, Type> {
+ @Context HttpServletRequest request;
@Override
- public Principal getValue(final HttpContext context) {
- //get principal from the request
- final Principal principal = context.getRequest().getUserPrincipal();
- if (principal != null) {
- return principal;
- }
-
- //get username from the parameter
- final String username = context.getRequest().getQueryParameters().getFirst(
- UserParam.NAME);
- if (username != null) {
- final UserParam userparam = new UserParam(username);
- return new Principal() {
- @Override
- public String getName() {
- return userparam.getValue();
- }
- };
+ public UserGroupInformation getValue(final HttpContext context) {
+ final Configuration conf = (Configuration)context.getProperties().get(
+ JspHelper.CURRENT_CONF);
+ try {
+ return JspHelper.getUGI(request, conf,
+ AuthenticationMethod.KERBEROS, false);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
}
-
- //user not found
- return null;
}
@Override
@@ -65,9 +61,9 @@ public class UserProvider extends Abstra
}
@Override
- public Injectable<Principal> getInjectable(
+ public Injectable<UserGroupInformation> getInjectable(
final ComponentContext componentContext, final Context context,
final Type type) {
- return type.equals(Principal.class)? this : null;
+ return type.equals(UserGroupInformation.class)? this : null;
}
}
\ No newline at end of file
Modified: hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java Fri Sep 16 15:04:26 2011
@@ -23,15 +23,20 @@ import static org.apache.hadoop.fs.FileS
import java.io.IOException;
import java.net.URI;
+import java.security.PrivilegedExceptionAction;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSMainOperationsBaseTest;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.web.resources.DatanodeWebHdfsMethods;
import org.apache.hadoop.hdfs.web.resources.ExceptionHandler;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.Assert;
@@ -49,14 +54,30 @@ public class TestFSMainOperationsWebHdfs
@BeforeClass
public static void setupCluster() {
- Configuration conf = new Configuration();
+ final Configuration conf = new Configuration();
+ conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
try {
cluster = new MiniDFSCluster(conf, 2, true, null);
cluster.waitActive();
+ //change root permission to 777
+ cluster.getFileSystem().setPermission(
+ new Path("/"), new FsPermission((short)0777));
+
final String uri = WebHdfsFileSystem.SCHEME + "://"
+ conf.get("dfs.http.address");
- fSys = FileSystem.get(new URI(uri), conf);
+
+ //get file system as a non-superuser
+ final UserGroupInformation current = UserGroupInformation.getCurrentUser();
+ final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+ current.getShortUserName() + "x", new String[]{"user"});
+ fSys = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+ @Override
+ public FileSystem run() throws Exception {
+ return FileSystem.get(new URI(uri), conf);
+ }
+ });
+
defaultWorkingDirectory = fSys.getWorkingDirectory();
} catch (Exception e) {
throw new RuntimeException(e);
@@ -93,7 +114,11 @@ public class TestFSMainOperationsWebHdfs
} catch (IOException e) {
// expected
}
- Assert.assertFalse(exists(fSys, testSubDir));
+ try {
+ Assert.assertFalse(exists(fSys, testSubDir));
+ } catch(AccessControlException e) {
+ // also okay for HDFS.
+ }
Path testDeepSubDir = getTestRootPath(fSys, "test/hadoop/file/deep/sub/dir");
try {
@@ -102,6 +127,10 @@ public class TestFSMainOperationsWebHdfs
} catch (IOException e) {
// expected
}
- Assert.assertFalse(exists(fSys, testDeepSubDir));
+ try {
+ Assert.assertFalse(exists(fSys, testDeepSubDir));
+ } catch(AccessControlException e) {
+ // also okay for HDFS.
+ }
}
}
Modified: hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1171613&r1=1171612&r2=1171613&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Fri Sep 16 15:04:26 2011
@@ -21,11 +21,17 @@ package org.apache.hadoop.hdfs.web;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
+import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
private static final Configuration conf = new Configuration();
@@ -33,9 +39,14 @@ public class TestWebHdfsFileSystemContra
private String defaultWorkingDirectory;
static {
+ conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
try {
cluster = new MiniDFSCluster(conf, 2, true, null);
cluster.waitActive();
+
+ //change root permission to 777
+ cluster.getFileSystem().setPermission(
+ new Path("/"), new FsPermission((short)0777));
} catch (IOException e) {
throw new RuntimeException(e);
}
@@ -45,7 +56,18 @@ public class TestWebHdfsFileSystemContra
protected void setUp() throws Exception {
final String uri = WebHdfsFileSystem.SCHEME + "://"
+ conf.get("dfs.http.address");
- fs = FileSystem.get(new URI(uri), conf);
+
+ //get file system as a non-superuser
+ final UserGroupInformation current = UserGroupInformation.getCurrentUser();
+ final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+ current.getShortUserName() + "x", new String[]{"user"});
+ fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+ @Override
+ public FileSystem run() throws Exception {
+ return FileSystem.get(new URI(uri), conf);
+ }
+ });
+
defaultWorkingDirectory = fs.getWorkingDirectory().toUri().getPath();
}
@@ -54,6 +76,45 @@ public class TestWebHdfsFileSystemContra
return defaultWorkingDirectory;
}
+ /** HDFS throws AccessControlException
+ * when calling exists(..) on a path /foo/bar/file
+ * where /foo/bar is in fact a file in HDFS.
+ */
+ @Override
+ public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
+ Path testDir = path("/test/hadoop");
+ assertFalse(fs.exists(testDir));
+ assertTrue(fs.mkdirs(testDir));
+ assertTrue(fs.exists(testDir));
+
+ createFile(path("/test/hadoop/file"));
+
+ Path testSubDir = path("/test/hadoop/file/subdir");
+ try {
+ fs.mkdirs(testSubDir);
+ fail("Should throw IOException.");
+ } catch (IOException e) {
+ // expected
+ }
+ try {
+ assertFalse(fs.exists(testSubDir));
+ } catch(AccessControlException e) {
+ // also okay for HDFS.
+ }
+
+ Path testDeepSubDir = path("/test/hadoop/file/deep/sub/dir");
+ try {
+ fs.mkdirs(testDeepSubDir);
+ fail("Should throw IOException.");
+ } catch (IOException e) {
+ // expected
+ }
+ try {
+ assertFalse(fs.exists(testDeepSubDir));
+ } catch(AccessControlException e) {
+ // also okay for HDFS.
+ }
+ }
//In trunk, testListStatusReturnsNullForNonExistentFile was replaced by
//testListStatusThrowsExceptionForNonExistentFile.