Posted to hdfs-commits@hadoop.apache.org by jg...@apache.org on 2010/07/09 20:36:10 UTC

svn commit: r962630 - in /hadoop/hdfs/trunk: CHANGES.txt src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

Author: jghoman
Date: Fri Jul  9 18:36:10 2010
New Revision: 962630

URL: http://svn.apache.org/viewvc?rev=962630&view=rev
Log:
HDFS-1045. In secure clusters, re-login is necessary for https clients before opening connections.
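
For context, the pattern this change applies in both GetImageServlet and SecondaryNameNode is: obtain a fresh Kerberos login from the keytab and run the HTTPS client call inside doAs() on that login's UGI, since the previous ticket may have expired. A minimal sketch of that pattern follows; the DFSConfigKeys constants are the ones used in this patch, while HttpsReloginSketch and openImageConnection() are made-up placeholders, not part of the commit.

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.security.UserGroupInformation;

    public class HttpsReloginSketch {
      // Re-login from the keytab and perform the HTTPS call as that user.
      // A fresh login is obtained instead of reusing the current login user,
      // because the ticket may have expired since the last connection.
      static void fetchImageSecurely(final Configuration conf) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
            conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
            conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws IOException {
            openImageConnection(conf);  // stands in for TransferFsImage.getFileClient(...)
            return null;
          }
        });
      }

      // Hypothetical placeholder for the Kerberized-SSL image transfer.
      static void openImageConnection(Configuration conf) throws IOException {
        // ...
      }
    }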

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=962630&r1=962629&r2=962630&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Fri Jul  9 18:36:10 2010
@@ -125,6 +125,9 @@ Trunk (unreleased changes)
 
     HDFS-1238. ant eclipse-files has drifted again. (jghoman)
 
+    HDFS-1045. In secure clusters, re-login is necessary for https 
+    clients before opening connections. (jghoman)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=962630&r1=962629&r2=962630&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Fri Jul  9 18:36:10 2010
@@ -209,6 +209,7 @@ public class DFSConfigKeys extends Commo
   public static final String  DFS_NAMENODE_KEYTAB_FILE_KEY = "dfs.namenode.keytab.file";
   public static final String  DFS_NAMENODE_USER_NAME_KEY = "dfs.namenode.kerberos.principal";
   public static final String  DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.namenode.kerberos.https.principal";
+  public static final String  DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY = "dfs.secondary.namenode.keytab.file";
   public static final String  DFS_SECONDARY_NAMENODE_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.principal";
   public static final String  DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.https.principal";
   public static final String  DFS_NAMENODE_NAME_CACHE_THRESHOLD_KEY = "dfs.namenode.name.cache.threshold";
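
The new dfs.secondary.namenode.keytab.file key is read alongside the secondary https principal key in the SecondaryNameNode change further down. A hedged sketch of setting the secondary-NameNode security keys programmatically (for example in a test) is shown below; only the key constants come from this file, and the paths, hosts, and principals are made-up examples.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class SecondaryNameNodeSecurityConf {
      // Example values only; real deployments set these in hdfs-site.xml.
      static Configuration withCheckpointSecurity(Configuration conf) {
        conf.set(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
            "/etc/security/keytabs/snn.keytab");                 // hypothetical path
        conf.set(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY,
            "snn/checkpoint01.example.com@EXAMPLE.COM");         // hypothetical principal
        conf.set(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY,
            "host/checkpoint01.example.com@EXAMPLE.COM");        // hypothetical https principal
        return conf;
      }
    }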

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java?rev=962630&r1=962629&r2=962630&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java Fri Jul  9 18:36:10 2010
@@ -27,6 +27,8 @@ import javax.servlet.http.HttpServletReq
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
@@ -48,7 +50,9 @@ public class GetImageServlet extends Htt
       ServletContext context = getServletContext();
       final FSImage nnImage = (FSImage)context.getAttribute("name.system.image");
       final TransferFsImage ff = new TransferFsImage(pmap, request, response);
-      UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Void>() {
+      final Configuration conf = (Configuration)getServletContext().getAttribute("name.conf");
+
+     UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Void>() {
 
         @Override
         public Void run() throws Exception {
@@ -67,12 +71,28 @@ public class GetImageServlet extends Htt
           } else if (ff.putImage()) {
             // issue a HTTP get request to download the new fsimage 
             nnImage.validateCheckpointUpload(ff.getToken());
-            TransferFsImage.getFileClient(ff.getInfoServer(), "getimage=1", 
-                nnImage.getFsImageNameCheckpoint());
-            nnImage.checkpointUploadDone();
+            reloginIfNecessary().doAs(new PrivilegedExceptionAction<Void>() {
+                @Override
+                public Void run() throws Exception {
+                  TransferFsImage.getFileClient(ff.getInfoServer(), "getimage=1", 
+                      nnImage.getFsImageNameCheckpoint());
+                  return null;
+                }
+            });
+           nnImage.checkpointUploadDone();
           }
           return null;
         }
+        
+        // We may have lost our ticket since the last time we tried to open
+        // an http connection, so log in just in case.
+        private UserGroupInformation reloginIfNecessary() throws IOException {
+          // This method is only called on the NN, therefore it is safe to
+          // use these key values.
+          return UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+              conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), 
+              conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
+        }       
       });
       
     } catch (Exception ie) {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=962630&r1=962629&r2=962630&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Fri Jul  9 18:36:10 2010
@@ -159,17 +159,16 @@ public class SecondaryNameNode implement
 
     // initialize the webserver for uploading files.
     // Kerberized SSL servers must be run from the host principal...
-    DFSUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, 
-        DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY);
-    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+    UserGroupInformation httpUGI = 
+      UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+          conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY), 
+          conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY));
     try {
-      infoServer = ugi.doAs(new PrivilegedExceptionAction<HttpServer>() {
-
+      infoServer = httpUGI.doAs(new PrivilegedExceptionAction<HttpServer>() {
         @Override
         public HttpServer run() throws IOException, InterruptedException {
           LOG.info("Starting web server as: " +
-              UserGroupInformation.getLoginUser().getUserName());
-
+              UserGroupInformation.getCurrentUser().getUserName());
           InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(
               conf.get(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
                        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_DEFAULT));
@@ -200,14 +199,9 @@ public class SecondaryNameNode implement
       });
     } catch (InterruptedException e) {
       throw new RuntimeException(e);
-    } finally {
-      // Go back to being the correct Namenode principal
-      LOG.info("Web server init done, returning to: " + 
-          UserGroupInformation.getLoginUser().getUserName());
-      DFSUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-          DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY);
-      
-    }
+    } 
+    
+    LOG.info("Web server init done");
 
     // The web-server port can be ephemeral... ensure we have the correct info
     infoPort = infoServer.getPort();
@@ -384,6 +378,9 @@ public class SecondaryNameNode implement
                             "after creating edits.new");
     }
 
+    // We may have lost our ticket since last checkpoint, log in again, just in case
+    if(UserGroupInformation.isSecurityEnabled())
+      UserGroupInformation.getCurrentUser().reloginFromKeytab();
     downloadCheckpointFiles(sig);   // Fetch fsimage and edits
     doMerge(sig);                   // Do the merge
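
For comparison with the servlet-side change, the checkpoint-side guard added above simply refreshes the existing keytab login in place before fetching the files. A minimal sketch of that guard, assuming a daemon that originally logged in from a keytab; the class and method names other than the UserGroupInformation calls are hypothetical.

    import java.io.IOException;

    import org.apache.hadoop.security.UserGroupInformation;

    public class CheckpointRelogin {
      // Refresh the Kerberos TGT from the keytab when security is enabled;
      // the previous ticket may have expired since the last checkpoint.
      static void reloginBeforeCheckpoint() throws IOException {
        if (UserGroupInformation.isSecurityEnabled()) {
          UserGroupInformation.getCurrentUser().reloginFromKeytab();
        }
        // ...then download fsimage and edits and do the merge, as above.
      }
    }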