Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 04:56:46 UTC

svn commit: r1077254 - in /hadoop/common/branches/branch-0.20-security-patches/src: hdfs/org/apache/hadoop/hdfs/ hdfs/org/apache/hadoop/hdfs/server/namenode/ hdfs/org/apache/hadoop/hdfs/tools/ mapred/org/apache/hadoop/mapreduce/security/ mapred/org/apa...

Author: omalley
Date: Fri Mar  4 03:56:46 2011
New Revision: 1077254

URL: http://svn.apache.org/viewvc?rev=1077254&view=rev
Log:
commit 08a69ed7f8016953beb364c1a900d08e856dd8a3
Author: Devaraj Das <dd...@yahoo-inc.com>
Date:   Mon Mar 1 00:09:00 2010 -0800

    HDFS-1007 from https://issues.apache.org/jira/secure/attachment/12437458/distcp-hftp.2.patch
    
    +++ b/YAHOO-CHANGES.txt
    +    HDFS-1007. Makes HFTP and Distcp use kerberized SSL. (ddas)
    +

Modified:
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/TokenCache.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/token/DelegationTokenRenewal.java
    hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/DistCp.java

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Fri Mar  4 03:56:46 2011
@@ -23,16 +23,17 @@ import java.io.InputStream;
 import java.io.IOException;
 
 import java.net.HttpURLConnection;
-import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 
+import java.security.PrivilegedExceptionAction;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Random;
 import java.util.TimeZone;
 
@@ -55,9 +56,14 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.server.namenode.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
+import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.*;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Progressable;
 
 /** An implementation of a protocol for accessing filesystems over HTTP.
@@ -76,6 +82,8 @@ public class HftpFileSystem extends File
 
   public static final String HFTP_TIMEZONE = "UTC";
   public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
+  private Token<? extends TokenIdentifier> delegationToken;
+  public static final String HFTP_RENEWER = "fs.hftp.renewer";
 
   public static final SimpleDateFormat getDateFormat() {
     final SimpleDateFormat df = new SimpleDateFormat(HFTP_DATE_FORMAT);
@@ -91,14 +99,64 @@ public class HftpFileSystem extends File
     };
 
   @Override
-  public void initialize(URI name, Configuration conf) throws IOException {
+  public void initialize(URI name, final Configuration conf) throws IOException {
     super.initialize(name, conf);
     setConf(conf);
     this.ugi = UserGroupInformation.getCurrentUser();
 
     nnAddr = NetUtils.createSocketAddr(name.toString());
+    
+    if (UserGroupInformation.isSecurityEnabled()) {
+      StringBuffer sb = new StringBuffer();
+      final String nnServiceName = 
+        (sb.append(NetUtils.normalizeHostName(name.getHost()))
+                .append(":").append(name.getPort())).toString();
+      Text nnServiceNameText = new Text(nnServiceName);
+      Collection<Token<? extends TokenIdentifier>> tokens =
+        ugi.getTokens();
+      //try finding a token for this namenode (esp applicable for tasks
+      //using hftp). If there exists one, just set the delegationField
+      for (Token<? extends TokenIdentifier> t : tokens) {
+        if ((t.getService()).equals(nnServiceNameText)) {
+          delegationToken = t;
+          return;
+        }
+      }
+      //since we don't already have a token, go get one over https
+      try {
+        ugi.doAs(new PrivilegedExceptionAction<Object>() {
+          public Object run() throws IOException {
+            //try https (on http we NEVER get a delegation token)
+            String nnHttpUrl = "https://" + nnServiceName;
+            Credentials c;
+            try {
+              c = DelegationTokenFetcher.getDTfromRemote(nnHttpUrl, 
+                  conf.get(HFTP_RENEWER));
+            } catch (Exception e) {
+              LOG.info("Couldn't get a delegation token from " + nnHttpUrl + 
+              " using https.");
+              //Maybe the server is in unsecure mode (that's bad but okay)
+              return null;
+            }
+            for (Token<? extends TokenIdentifier> t : c.getAllTokens()) {
+              //the service field is already set and so setService 
+              //is not required
+              delegationToken = t;
+              LOG.debug("Got dt for " + getUri() + ";t.service="
+                  +t.getService());
+            }
+            return null;
+          }
+        });
+      } catch (InterruptedException e) {
+        throw new RuntimeException(e);
+      }
+    }
   }
   
+  public Token<? extends TokenIdentifier> getDelegationToken() {
+    return delegationToken;
+  }
 
   @Override
   public URI getUri() {
@@ -118,6 +176,7 @@ public class HftpFileSystem extends File
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
     try {
+      query = updateQuery(query);
       final URL url = new URI("http", null, nnAddr.getHostName(),
           nnAddr.getPort(), path, query, null).toURL();
       if (LOG.isTraceEnabled()) {
@@ -128,6 +187,17 @@ public class HftpFileSystem extends File
       throw (IOException)new IOException().initCause(e);
     }
   }
+  
+  protected String updateQuery(String query) throws IOException {
+    String tokenString = null;
+    if (UserGroupInformation.isSecurityEnabled()) {
+      if (delegationToken != null) {
+        tokenString = delegationToken.encodeToUrlString();
+        return (query + JspHelper.SET_DELEGATION + tokenString);
+      } // else we are talking to an unsecure cluster
+    }
+    return query;
+  }
 
   @Override
   public FSDataInputStream open(Path f, int buffersize) throws IOException {
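
For context on the new updateQuery() hook: when security is on and a delegation token was found or fetched during initialize(), the token's URL-safe encoding is appended to every request's query string; otherwise the query passes through unchanged. A minimal, standalone sketch of that shape, assuming JspHelper.SET_DELEGATION expands to the literal "&delegation=" (the constant's value is not shown in this patch):

    public class UpdateQuerySketch {
      // Stand-in for JspHelper.SET_DELEGATION; assumed value, see note above.
      private static final String SET_DELEGATION = "&delegation=";

      // Mirrors the shape of HftpFileSystem.updateQuery(): append the encoded
      // token only when one is present, otherwise leave the query untouched.
      static String updateQuery(String query, String encodedToken) {
        if (encodedToken == null) {
          return query;          // no token: likely talking to an unsecured cluster
        }
        return query + SET_DELEGATION + encodedToken;
      }

      public static void main(String[] args) {
        System.out.println(updateQuery("ugi=alice", "HAABC"));  // hypothetical token string
        System.out.println(updateQuery("ugi=alice", null));
      }
    }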

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java Fri Mar  4 03:56:46 2011
@@ -68,6 +68,7 @@ public class HsftpFileSystem extends Hft
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
     try {
+      query = updateQuery(query);
       final URL url = new URI("https", null, nnAddr.getHostName(),
           nnAddr.getPort(), path, query, null).toURL();
       HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DelegationTokenServlet.java Fri Mar  4 03:56:46 2011
@@ -41,6 +41,7 @@ import org.apache.hadoop.security.token.
 public class DelegationTokenServlet extends DfsServlet {
   private static final Log LOG = LogFactory.getLog(DelegationTokenServlet.class);
   public static final String PATH_SPEC = "/getDelegationToken";
+  public static final String RENEWER = "renewer";
   
   @Override
   protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
@@ -59,6 +60,9 @@ public class DelegationTokenServlet exte
     LOG.info("Sending token: {" + ugi.getUserName() + "," + req.getRemoteAddr() +"}");
     final ServletContext context = getServletContext();
     final NameNode nn = (NameNode) context.getAttribute("name.node");
+    String renewer = req.getParameter(RENEWER);
+    final String renewerFinal = (renewer == null) ? 
+        req.getUserPrincipal().getName() : renewer;
     
     DataOutputStream dos = null;
     try {
@@ -69,7 +73,7 @@ public class DelegationTokenServlet exte
         public Void run() throws Exception {
           
           Token<DelegationTokenIdentifier> token = 
-            nn.getDelegationToken(new Text(req.getUserPrincipal().getName()));
+            nn.getDelegationToken(new Text(renewerFinal));
           String s = NameNode.getAddress(conf).getAddress().getHostAddress()
                      + ":" + NameNode.getAddress(conf).getPort();
           token.setService(new Text(s));
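
The servlet now honors an optional renewer query parameter and falls back to the caller's Kerberos principal when it is absent. A small sketch of the URL a client would build against it, with a hypothetical namenode address and renewer principal; the patch's client code appends the renewer verbatim, as getDTfromRemote below does:

    public class TokenUrlSketch {
      public static void main(String[] args) {
        // Hypothetical HTTPS address of the namenode and renewer principal.
        String nnHttpsAddr = "https://namenode.example.com:50470";
        String renewer = "jt/jobtracker.example.com@EXAMPLE.COM";

        // "/getDelegationToken" and "renewer" mirror the servlet constants
        // PATH_SPEC and RENEWER in the hunk above.
        String url = nnHttpsAddr + "/getDelegationToken"
            + (renewer == null ? "" : "?renewer=" + renewer);
        System.out.println(url);
        // https://namenode.example.com:50470/getDelegationToken?renewer=jt/jobtracker.example.com@EXAMPLE.COM
      }
    }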

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java Fri Mar  4 03:56:46 2011
@@ -427,6 +427,7 @@ public class JspHelper {
                                             Configuration conf
                                            ) throws IOException {
     UserGroupInformation ugi = null;
+    final String RANDOM_USER = "webuser1234";
     if(UserGroupInformation.isSecurityEnabled()) {
       String user = request.getRemoteUser();
       String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
@@ -434,6 +435,12 @@ public class JspHelper {
         Token<DelegationTokenIdentifier> token = 
           new Token<DelegationTokenIdentifier>();
         token.decodeFromUrlString(tokenString);
+        if (user == null) {
+          //this really doesn't break any security since we use the 
+          //delegation token for authentication in
+          //the back end.
+          user = RANDOM_USER;
+        }
         ugi = UserGroupInformation.createRemoteUser(user);
         ugi.addToken(token);        
         ugi.setAuthenticationMethod(AuthenticationMethod.TOKEN);

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Mar  4 03:56:46 2011
@@ -267,13 +267,13 @@ public class NameNode implements ClientP
           httpServer.addInternalServlet("getimage", "/getimage", 
               GetImageServlet.class, true);
           httpServer.addInternalServlet("listPaths", "/listPaths/*", 
-              ListPathsServlet.class, true);
+              ListPathsServlet.class, false);
           httpServer.addInternalServlet("data", "/data/*", 
-              FileDataServlet.class, true);
+              FileDataServlet.class, false);
           httpServer.addInternalServlet("checksum", "/fileChecksum/*",
-              FileChecksumServlets.RedirectServlet.class, true);
+              FileChecksumServlets.RedirectServlet.class, false);
           httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
-              ContentSummaryServlet.class, true);
+              ContentSummaryServlet.class, false);
           httpServer.start();
       
           // The web-server port can be ephemeral... ensure we have the correct info

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java Fri Mar  4 03:56:46 2011
@@ -62,7 +62,7 @@ public class DelegationTokenFetcher {
       public Object run() throws Exception {
         
         if(args.length == 3 && "--webservice".equals(args[0])) {
-          getDTfromRemote(args[1], args[2]);
+          getDTfromRemoteIntoFile(args[1], args[2]);
           return null;
         }
         // avoid annoying mistake
@@ -127,43 +127,53 @@ public class DelegationTokenFetcher {
     ts.addToken(new Text(shortName), token);
     ts.write(out);
   }
-  
+  /**
+   * Utility method to obtain a delegation token over http
+   * @param nnHttpAddr Namenode http addr, such as http://namenode:50070
+   */
+  static public Credentials getDTfromRemote(String nnAddr, String renewer) 
+  throws IOException {
+    // Enable Kerberos sockets
+   System.setProperty("https.cipherSuites", "TLS_KRB5_WITH_3DES_EDE_CBC_SHA");
+   DataOutputStream file = null;
+   DataInputStream dis = null;
+   
+   try {
+     StringBuffer url = new StringBuffer();
+     if (renewer != null) {
+       url.append(nnAddr).append(DelegationTokenServlet.PATH_SPEC).append("?").
+       append(DelegationTokenServlet.RENEWER).append("=").append(renewer);
+     } else {
+       url.append(nnAddr).append(DelegationTokenServlet.PATH_SPEC);
+     }
+     System.out.println("Retrieving token from: " + url);
+     URL remoteURL = new URL(url.toString());
+     URLConnection connection = remoteURL.openConnection();
+     
+     InputStream in = connection.getInputStream();
+     Credentials ts = new Credentials();
+     dis = new DataInputStream(in);
+     ts.readFields(dis);
+     return ts;
+   } catch (Exception e) {
+     throw new IOException("Unable to obtain remote token", e);
+   } finally {
+     if(dis != null) dis.close();
+     if(file != null) file.close();
+   }
+ }
   /**
    * Utility method to obtain a delegation token over http
    * @param nnHttpAddr Namenode http addr, such as http://namenode:50070
    * @param filename Name of file to store token in
    */
-   static private void getDTfromRemote(String nnAddr, String filename) 
-   throws IOException {
-     // Enable Kerberos sockets
-    System.setProperty("https.cipherSuites", "TLS_KRB5_WITH_3DES_EDE_CBC_SHA");
-    String ugiPostfix = "";
-    DataOutputStream file = null;
-    DataInputStream dis = null;
-    
-    if(nnAddr.startsWith("http:"))
-      ugiPostfix = "?ugi=" + UserGroupInformation.getCurrentUser().getShortUserName();
-    
-    try {
-      System.out.println("Retrieving token from: " + 
-          nnAddr + DelegationTokenServlet.PATH_SPEC + ugiPostfix);
-      URL remoteURL = new URL(nnAddr + DelegationTokenServlet.PATH_SPEC + ugiPostfix);
-      URLConnection connection = remoteURL.openConnection();
-      
-      InputStream in = connection.getInputStream();
-      Credentials ts = new Credentials();
-      dis = new DataInputStream(in);
-      ts.readFields(dis);
-      file = new DataOutputStream(new FileOutputStream(filename));
-      ts.write(file);
-      file.flush();
-      System.out.println("Successfully wrote token of " + file.size() 
-          + " bytes  to " + filename);
-    } catch (Exception e) {
-      throw new IOException("Unable to obtain remote token", e);
-    } finally {
-      if(dis != null) dis.close();
-      if(file != null) file.close();
-    }
+  static private void getDTfromRemoteIntoFile(String nnAddr, String filename) 
+  throws IOException {
+    Credentials ts = getDTfromRemote(nnAddr, null); 
+    DataOutputStream file = new DataOutputStream(new FileOutputStream(filename));
+    ts.write(file);
+    file.flush();
+    System.out.println("Successfully wrote token of " + file.size() 
+        + " bytes  to " + filename);
   }
 }
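
The refactoring above splits the old private helper in two: getDTfromRemote() returns the fetched Credentials to callers such as HftpFileSystem, while getDTfromRemoteIntoFile() keeps the original write-to-file behaviour behind the --webservice command-line option. A hedged sketch of how a caller might use the new public entry point; the namenode address and renewer are placeholders and error handling is omitted:

    import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    public class FetchTokenSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder namenode HTTPS address and renewer principal.
        String nnHttpsAddr = "https://namenode.example.com:50470";
        String renewer = "jt/jobtracker.example.com@EXAMPLE.COM";

        // Fetch the delegation token(s) over kerberized SSL.
        Credentials creds = DelegationTokenFetcher.getDTfromRemote(nnHttpsAddr, renewer);
        for (Token<? extends TokenIdentifier> t : creds.getAllTokens()) {
          System.out.println("service=" + t.getService());
        }
      }
    }

From the command line, the same path is still exercised by the existing main(): invoking DelegationTokenFetcher with "--webservice <namenode address> <output file>" now goes through getDTfromRemoteIntoFile().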

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/TokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/TokenCache.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/TokenCache.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/TokenCache.java Fri Mar  4 03:56:46 2011
@@ -29,8 +29,10 @@ import org.apache.hadoop.fs.FSDataInputS
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.HftpFileSystem;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobTracker;
@@ -85,7 +87,6 @@ public class TokenCache {
   throws IOException {
     // get jobtracker principal id (for the renewer)
     Text jtCreds = new Text(conf.get(JobTracker.JT_USER_NAME, ""));
-
     for(Path p: ps) {
       FileSystem fs = FileSystem.get(p.toUri(), conf);
       if(fs instanceof DistributedFileSystem) {
@@ -107,8 +108,20 @@ public class TokenCache {
 
         token.setService(new Text(fs_addr));
         credentials.addToken(new Text(fs_addr), token);
-        LOG.info("getting dt for " + p.toString() + ";uri="+ fs_addr + 
+        LOG.info("Got dt for " + p.toString() + ";uri="+ fs_addr + 
             ";t.service="+token.getService());
+      } else if (fs instanceof HftpFileSystem) {
+        String fs_addr = buildDTServiceName(fs.getUri());
+        Token<DelegationTokenIdentifier> token = 
+          TokenCache.getDelegationToken(credentials, fs_addr); 
+        if(token != null) {
+          LOG.debug("DT for " + token.getService()  + " is already present");
+          continue;
+        }
+        //the initialize method of hftp, called via FileSystem.get() done
+        //earlier gets a delegation token
+        credentials.addToken(new Text(fs_addr), 
+            ((HftpFileSystem) fs).getDelegationToken());
       }
     }
   }
@@ -186,7 +199,7 @@ public class TokenCache {
    * @param uri
    * @return "ip:port"
    */
-  static String buildDTServiceName(URI uri) {
+  public static String buildDTServiceName(URI uri) {
     int port = uri.getPort();
     if(port == -1) 
       port = NameNode.DEFAULT_PORT;
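
Putting the pieces together on the client side: FileSystem.get() on an hftp URI runs HftpFileSystem.initialize(), which fetches a delegation token over https, and obtainTokensForNamenodes() then files that token in the job's Credentials under the namenode's service name. A rough sketch of that flow with placeholder paths; the renewer key mirrors HftpFileSystem.HFTP_RENEWER from this patch, and a secured cluster is assumed:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.HftpFileSystem;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.JobTracker;
    import org.apache.hadoop.mapreduce.security.TokenCache;

    public class HftpTokenFlowSketch {
      public static void main(String[] args) throws Exception {
        JobConf jobConf = new JobConf();
        Configuration conf = new Configuration();

        // Ask the namenode to issue the token with the JobTracker as renewer,
        // as DistCp does in the hunk further below.
        conf.set(HftpFileSystem.HFTP_RENEWER, conf.get(JobTracker.JT_USER_NAME, ""));

        // Placeholder hftp source path; FileSystem.get() inside
        // obtainTokensForNamenodes() triggers HftpFileSystem.initialize().
        Path[] srcs = { new Path("hftp://src-namenode.example.com:50070/user/alice/data") };
        TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), srcs, conf);

        System.out.println("tokens: " + jobConf.getCredentials().getAllTokens());
      }
    }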

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/token/DelegationTokenRenewal.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/token/DelegationTokenRenewal.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/token/DelegationTokenRenewal.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapreduce/security/token/DelegationTokenRenewal.java Fri Mar  4 03:56:46 2011
@@ -25,8 +25,10 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Set;
 import java.util.Timer;
 import java.util.TimerTask;
 
@@ -94,13 +96,10 @@ public class DelegationTokenRenewal {
   
   //managing the list of tokens using Map
   // jobId=>List<tokens>
-  private static List<DelegationTokenToRenew> delegationTokens = 
-    Collections.synchronizedList(new ArrayList<DelegationTokenToRenew>());
+  private static Set<DelegationTokenToRenew> delegationTokens = 
+    Collections.synchronizedSet(new HashSet<DelegationTokenToRenew>());
   //adding token
   private static void addTokenToList(DelegationTokenToRenew t) {
-    //check to see if the token already exists in the list
-    if (delegationTokens.contains(t))
-      return;
     delegationTokens.add(t);
   }
   

Modified: hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/DistCp.java?rev=1077254&r1=1077253&r2=1077254&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/tools/org/apache/hadoop/tools/DistCp.java Fri Mar  4 03:56:46 2011
@@ -43,6 +43,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.HftpFileSystem;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -57,6 +58,7 @@ import org.apache.hadoop.mapred.InputSpl
 import org.apache.hadoop.mapred.InvalidInputException;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobTracker;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.RecordReader;
@@ -627,6 +629,9 @@ public class DistCp implements Tool {
     List<IOException> rslt = new ArrayList<IOException>();
     
     // get tokens for all the required FileSystems..
+    // also set the renewer as the JobTracker for the hftp case
+    conf.set(HftpFileSystem.HFTP_RENEWER, 
+        conf.get(JobTracker.JT_USER_NAME, ""));
     Path[] ps = new Path[srcPaths.size()];
     ps = srcPaths.toArray(ps);
     TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), ps, conf);
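
For reference, the scenario this change targets is a DistCp run that reads over HFTP (served from the source namenode's HTTP port, commonly 50070) and writes to HDFS; with security enabled, the delegation token is now fetched over kerberized SSL at job submission and renewed by the JobTracker. A typical invocation, with placeholder hosts, ports and paths:

    hadoop distcp hftp://src-namenode.example.com:50070/user/alice/data hdfs://dst-namenode.example.com:8020/user/alice/data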