You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 04:56:09 UTC
svn commit: r1077249 - in
/hadoop/common/branches/branch-0.20-security-patches/src:
core/org/apache/hadoop/http/HttpServer.java
hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
Author: omalley
Date: Fri Mar 4 03:56:09 2011
New Revision: 1077249
URL: http://svn.apache.org/viewvc?rev=1077249&view=rev
Log:
commit e2efc827143cd51bd963fcb2fd9a0cc358993495
Author: Devaraj Das <dd...@yahoo-inc.com>
Date: Sat Feb 27 03:19:02 2010 -0800
HADOOP-6584 from https://issues.apache.org/jira/secure/attachment/12437337/HADOOP-6584-Y20-4.patch
+++ b/YAHOO-CHANGES.txt
+ HADOOP-6584. Adds KrbSSL connector for jetty. (Jakob Homan via ddas)
+
Modified:
hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java?rev=1077249&r1=1077248&r2=1077249&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java Fri Mar 4 03:56:09 2011
@@ -44,7 +44,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.log.LogLevel;
+import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector.MODE;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -130,8 +132,17 @@ public class HttpServer implements Filte
webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
webServer.addHandler(webAppContext);
+ addDefaultApps(contexts, appDir, conf);
+
+ defineFilter(webAppContext, "krb5Filter",
+ Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(),
+ null, null);
+
addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
final FilterInitializer[] initializers = getFilterInitializers(conf);
if (initializers != null) {
@@ -252,7 +263,7 @@ public class HttpServer implements Filte
*/
public void addServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz) {
- addInternalServlet(name, pathSpec, clazz);
+ addInternalServlet(name, pathSpec, clazz, false);
addFilterPathMapping(pathSpec, webAppContext);
}
@@ -266,11 +277,38 @@ public class HttpServer implements Filte
@Deprecated
public void addInternalServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz) {
+ addInternalServlet(name, pathSpec, clazz, false);
+ }
+
+ /**
+ * Add an internal servlet in the server, specifying whether or not to
+ * protect with Kerberos authentication.
+ * Note: This method is to be used for adding servlets that facilitate
+ * internal communication and not for user facing functionality. For
+ * servlets added using this method, filters (except internal Kerberized
+ * filters) are not enabled.
+ *
+ * @param name The name of the servlet (can be passed as null)
+ * @param pathSpec The path spec for the servlet
+ * @param clazz The servlet class
+ */
+ public void addInternalServlet(String name, String pathSpec,
+ Class<? extends HttpServlet> clazz, boolean requireAuth) {
ServletHolder holder = new ServletHolder(clazz);
if (name != null) {
holder.setName(name);
}
webAppContext.addServlet(holder, pathSpec);
+
+ if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
+ LOG.info("Adding Kerberos filter to " + name);
+ ServletHandler handler = webAppContext.getServletHandler();
+ FilterMapping fmap = new FilterMapping();
+ fmap.setPathSpec(pathSpec);
+ fmap.setFilterName("krb5Filter");
+ fmap.setDispatches(Handler.ALL);
+ handler.addFilterMapping(fmap);
+ }
}
/** {@inheritDoc} */
@@ -408,10 +446,22 @@ public class HttpServer implements Filte
*/
public void addSslListener(InetSocketAddress addr, Configuration sslConf,
boolean needClientAuth) throws IOException {
+ addSslListener(addr, sslConf, needClientAuth, false);
+ }
+
+ /**
+ * Configure an ssl listener on the server.
+ * @param addr address to listen on
+ * @param sslConf conf to retrieve ssl options
+ * @param needCertsAuth whether certificate-based client authentication is required
+ * @param needKrbAuth whether to allow kerberos auth
+ */
+ public void addSslListener(InetSocketAddress addr, Configuration sslConf,
+ boolean needCertsAuth, boolean needKrbAuth) throws IOException {
if (webServer.isStarted()) {
throw new IOException("Failed to add ssl listener");
}
- if (needClientAuth) {
+ if (needCertsAuth) {
// setting up SSL truststore for authenticating clients
System.setProperty("javax.net.ssl.trustStore", sslConf.get(
"ssl.server.truststore.location", ""));
@@ -420,14 +470,22 @@ public class HttpServer implements Filte
System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
"ssl.server.truststore.type", "jks"));
}
- SslSocketConnector sslListener = new SslSocketConnector();
+ Krb5AndCertsSslSocketConnector.MODE mode;
+ if(needCertsAuth && needKrbAuth)
+ mode = MODE.BOTH;
+ else if (!needCertsAuth && needKrbAuth)
+ mode = MODE.KRB;
+ else // Default to certificates
+ mode = MODE.CERTS;
+
+ SslSocketConnector sslListener = new Krb5AndCertsSslSocketConnector(mode);
sslListener.setHost(addr.getHostName());
sslListener.setPort(addr.getPort());
sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
sslListener.setPassword(sslConf.get("ssl.server.keystore.password", ""));
sslListener.setKeyPassword(sslConf.get("ssl.server.keystore.keypassword", ""));
sslListener.setKeystoreType(sslConf.get("ssl.server.keystore.type", "jks"));
- sslListener.setNeedClientAuth(needClientAuth);
+ sslListener.setNeedClientAuth(needCertsAuth);
webServer.addConnector(sslListener);
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1077249&r1=1077248&r2=1077249&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSConfigKeys.java Fri Mar 4 03:56:09 2011
@@ -196,4 +196,5 @@ public class DFSConfigKeys extends Commo
public static final String DFS_DATANODE_USER_NAME_KEY = "dfs.datanode.user.name.key";
public static final String DFS_NAMENODE_KEYTAB_FILE_KEY = "dfs.namenode.keytab.file";
public static final String DFS_NAMENODE_USER_NAME_KEY = "dfs.namenode.user.name.key";
+ public static final String DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.namenode.krb.https.user.name.key";
}
Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1077249&r1=1077248&r2=1077249&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Mar 4 03:56:09 2011
@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.hdfs.protocol.*;
@@ -61,6 +62,7 @@ import org.apache.hadoop.security.Refres
import java.io.*;
import java.net.*;
+import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.Iterator;
@@ -219,51 +221,77 @@ public class NameNode implements ClientP
this.emptier.start();
}
- private void startHttpServer(Configuration conf) throws IOException {
- String infoAddr =
- NetUtils.getServerAddress(conf, "dfs.info.bindAddress",
- "dfs.info.port", "dfs.http.address");
- InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
- String infoHost = infoSocAddr.getHostName();
- int infoPort = infoSocAddr.getPort();
- this.httpServer = new HttpServer("hdfs", infoHost, infoPort,
- infoPort == 0, conf);
- if (conf.getBoolean("dfs.https.enable", false)) {
- boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
- InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
- "dfs.https.address", infoHost + ":" + 0));
- Configuration sslConf = new Configuration(false);
- sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
- "ssl-server.xml"));
- this.httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
- // assume same ssl port for all datanodes
- InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
- "dfs.datanode.https.address", infoHost + ":" + 50475));
- this.httpServer.setAttribute("datanode.https.port", datanodeSslPort
- .getPort());
- }
- this.httpServer.setAttribute("name.node", this);
- this.httpServer.setAttribute("name.node.address", getNameNodeAddress());
- this.httpServer.setAttribute("name.system.image", getFSImage());
- this.httpServer.setAttribute("name.conf", conf);
- this.httpServer.addInternalServlet("getDelegationToken",
- DelegationTokenServlet.PATH_SPEC, DelegationTokenServlet.class);
- this.httpServer.addInternalServlet("fsck", "/fsck", FsckServlet.class);
- this.httpServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class);
- this.httpServer.addInternalServlet("listPaths", "/listPaths/*", ListPathsServlet.class);
- this.httpServer.addInternalServlet("data", "/data/*", FileDataServlet.class);
- this.httpServer.addInternalServlet("checksum", "/fileChecksum/*",
- FileChecksumServlets.RedirectServlet.class);
- this.httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
- ContentSummaryServlet.class);
- this.httpServer.start();
-
- // The web-server port can be ephemeral... ensure we have the correct info
- infoPort = this.httpServer.getPort();
- this.httpAddress = new InetSocketAddress(infoHost, infoPort);
- conf.set("dfs.http.address", infoHost + ":" + infoPort);
- LOG.info("Web-server up at: " + infoHost + ":" + infoPort);
- }
+ private void startHttpServer(final Configuration conf) throws IOException {
+ // Kerberized SSL servers must be run from the host principal...
+ DFSUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
+ DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY);
+ UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+ try {
+ this.httpServer = ugi.doAs(new PrivilegedExceptionAction<HttpServer>() {
+ @Override
+ public HttpServer run() throws IOException, InterruptedException {
+ String infoAddr =
+ NetUtils.getServerAddress(conf, "dfs.info.bindAddress",
+ "dfs.info.port", "dfs.http.address");
+ InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
+ String infoHost = infoSocAddr.getHostName();
+ int infoPort = infoSocAddr.getPort();
+ httpServer = new HttpServer("hdfs", infoHost, infoPort,
+ infoPort == 0, conf);
+
+ boolean certSSL = conf.getBoolean("dfs.https.enable", false);
+ boolean useKrb = UserGroupInformation.isSecurityEnabled();
+ if (certSSL || useKrb) {
+ boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
+ InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(infoHost + ":" + conf.get(
+ "dfs.https.port", "0"));
+ Configuration sslConf = new Configuration(false);
+ if(certSSL) {
+ sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+ "ssl-server.xml"));
+ }
+ httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth, useKrb);
+ // assume same ssl port for all datanodes
+ InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
+ "dfs.datanode.https.address", infoHost + ":" + 50475));
+ httpServer.setAttribute("datanode.https.port", datanodeSslPort
+ .getPort());
+ }
+ httpServer.setAttribute("name.node", NameNode.this);
+ httpServer.setAttribute("name.node.address", getNameNodeAddress());
+ httpServer.setAttribute("name.system.image", getFSImage());
+ httpServer.setAttribute("name.conf", conf);
+ httpServer.addInternalServlet("getDelegationToken",
+ DelegationTokenServlet.PATH_SPEC, DelegationTokenServlet.class, true);
+ httpServer.addInternalServlet("fsck", "/fsck", FsckServlet.class, true);
+ httpServer.addInternalServlet("getimage", "/getimage",
+ GetImageServlet.class, true);
+ httpServer.addInternalServlet("listPaths", "/listPaths/*",
+ ListPathsServlet.class, true);
+ httpServer.addInternalServlet("data", "/data/*",
+ FileDataServlet.class, true);
+ httpServer.addInternalServlet("checksum", "/fileChecksum/*",
+ FileChecksumServlets.RedirectServlet.class, true);
+ httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
+ ContentSummaryServlet.class, true);
+ httpServer.start();
+
+ // The web-server port can be ephemeral... ensure we have the correct info
+ infoPort = httpServer.getPort();
+ httpAddress = new InetSocketAddress(infoHost, infoPort);
+ conf.set("dfs.http.address", infoHost + ":" + infoPort);
+ LOG.info("Web-server up at: " + infoHost + ":" + infoPort);
+ return httpServer;
+ }
+ });
+ } catch (InterruptedException e) {
+ throw new IOException(e);
+ } finally {
+ // Go back to being the correct Namenode principal
+ DFSUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
+ DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY);
+ }
+ }
private final static String KEYTAB_FILE_KEY = "dfs.namenode.keytab.file";
private final static String USER_NAME_KEY = "dfs.namenode.user.name.key";