Posted to hdfs-commits@hadoop.apache.org by ar...@apache.org on 2013/10/02 02:09:09 UTC

svn commit: r1528282 - in /hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/server/namenode/ src/test/java/org/apache/hadoop/hdfs/

Author: arp
Date: Wed Oct  2 00:09:08 2013
New Revision: 1528282

URL: http://svn.apache.org/r1528282
Log:
HDFS-5255. Merging r1528279 from trunk to branch-2

Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1528282&r1=1528281&r2=1528282&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Oct  2 00:09:08 2013
@@ -141,6 +141,9 @@ Release 2.1.2 - UNRELEASED
     HDFS-5265. Namenode fails to start when dfs.https.port is unspecified.
     (Haohui Mai via jing9)
 
+    HDFS-5255. Distcp job fails with hsftp when https is enabled in insecure
+    cluster. (Arpit Agarwal)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=1528282&r1=1528281&r2=1528282&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java Wed Oct  2 00:09:08 2013
@@ -94,7 +94,6 @@ public class HftpFileSystem extends File
   private URI hftpURI;
 
   protected URI nnUri;
-  protected URI nnSecureUri;
 
   public static final String HFTP_TIMEZONE = "UTC";
   public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
@@ -134,34 +133,33 @@ public class HftpFileSystem extends File
         DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
   }
 
-  protected int getDefaultSecurePort() {
-    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
-  }
-
+  /**
+   *  We generate the address with one of the following ports, in
+   *  order of preference.
+   *  1. Port from the hftp URI e.g. hftp://namenode:4000/ will return 4000.
+   *  2. Port configured via DFS_NAMENODE_HTTP_PORT_KEY.
+   *  3. DFS_NAMENODE_HTTP_PORT_DEFAULT i.e. 50070.
+   *
+   * @param uri the hftp URI supplied by the caller
+   * @return the namenode http address resolved as described above
+   */
   protected InetSocketAddress getNamenodeAddr(URI uri) {
     // use authority so user supplied uri can override port
     return NetUtils.createSocketAddr(uri.getAuthority(), getDefaultPort());
   }
 
-  protected InetSocketAddress getNamenodeSecureAddr(URI uri) {
-    // must only use the host and the configured https port
-    return NetUtils.createSocketAddrForHost(uri.getHost(), getDefaultSecurePort());
-  }
-
   protected URI getNamenodeUri(URI uri) {
-    return DFSUtil.createUri("http", getNamenodeAddr(uri));
-  }
-
-  protected URI getNamenodeSecureUri(URI uri) {
-    return DFSUtil.createUri("http", getNamenodeSecureAddr(uri));
+    return DFSUtil.createUri(getUnderlyingProtocol(), getNamenodeAddr(uri));
   }
 
+  /**
+   * See the documentation of {@link #getNamenodeAddr(URI)} for the logic
+   * behind selecting the canonical service name.
+   * @return the canonical service name, derived from the namenode http address
+   */
   @Override
   public String getCanonicalServiceName() {
-    // unlike other filesystems, hftp's service is the secure port, not the
-    // actual port in the uri
-    return SecurityUtil.buildTokenService(nnSecureUri).toString();
+    return SecurityUtil.buildTokenService(nnUri).toString();
   }
   
   @Override
@@ -187,7 +185,6 @@ public class HftpFileSystem extends File
     setConf(conf);
     this.ugi = UserGroupInformation.getCurrentUser(); 
     this.nnUri = getNamenodeUri(name);
-    this.nnSecureUri = getNamenodeSecureUri(name);
     try {
       this.hftpURI = new URI(name.getScheme(), name.getAuthority(),
                              null, null, null);
@@ -225,7 +222,7 @@ public class HftpFileSystem extends File
 
   protected Token<DelegationTokenIdentifier> selectDelegationToken(
       UserGroupInformation ugi) {
-  	return hftpTokenSelector.selectToken(nnSecureUri, ugi.getTokens(), getConf());
+    return hftpTokenSelector.selectToken(nnUri, ugi.getTokens(), getConf());
   }
   
 
@@ -234,6 +231,13 @@ public class HftpFileSystem extends File
     return renewToken;
   }
 
+  /**
+   * Return the underlying protocol that is used to talk to the namenode.
+   */
+  protected String getUnderlyingProtocol() {
+    return "http";
+  }
+
   @Override
   public synchronized <T extends TokenIdentifier> void setDelegationToken(Token<T> token) {
     renewToken = token;
@@ -257,7 +261,7 @@ public class HftpFileSystem extends File
       return ugi.doAs(new PrivilegedExceptionAction<Token<?>>() {
         @Override
         public Token<?> run() throws IOException {
-          final String nnHttpUrl = nnSecureUri.toString();
+          final String nnHttpUrl = nnUri.toString();
           Credentials c;
           try {
             c = DelegationTokenFetcher.getDTfromRemote(nnHttpUrl, renewer);
@@ -301,7 +305,7 @@ public class HftpFileSystem extends File
    * @throws IOException on error constructing the URL
    */
   protected URL getNamenodeURL(String path, String query) throws IOException {
-    final URL url = new URL("http", nnUri.getHost(),
+    final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(),
           nnUri.getPort(), path + '?' + query);
     if (LOG.isTraceEnabled()) {
       LOG.trace("url=" + url);
@@ -703,17 +707,20 @@ public class HftpFileSystem extends File
       return true;
     }
 
+    protected String getUnderlyingProtocol() {
+      return "http";
+    }
+
     @SuppressWarnings("unchecked")
     @Override
     public long renew(Token<?> token, 
                       Configuration conf) throws IOException {
       // update the kerberos credentials, if they are coming from a keytab
       UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
-      // use http to renew the token
       InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
       return 
         DelegationTokenFetcher.renewDelegationToken
-        (DFSUtil.createUri("http", serviceAddr).toString(),
+        (DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr).toString(),
          (Token<DelegationTokenIdentifier>) token);
     }
 
@@ -723,10 +730,9 @@ public class HftpFileSystem extends File
                        Configuration conf) throws IOException {
       // update the kerberos credentials, if they are coming from a keytab
       UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
-      // use http to cancel the token
       InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
       DelegationTokenFetcher.cancelDelegationToken
-        (DFSUtil.createUri("http", serviceAddr).toString(),
+        (DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr).toString(),
          (Token<DelegationTokenIdentifier>) token);
     }    
   }
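
The central change in HftpFileSystem above is the new getUnderlyingProtocol()
hook: the namenode URI, the token service, and token renewal/cancellation now
all derive their scheme from that single method instead of a hard-coded "http"
plus a separate "secure" port. A minimal, self-contained sketch of the pattern
(the class and method names below are illustrative, not the actual Hadoop
classes):

import java.net.URI;
import java.net.URISyntaxException;

// Base variant: picks the scheme and default port in one place.
class PlainNamenodeClientSketch {
  // Mirrors the port preference described in the javadoc above:
  // port from the URI first, otherwise the default http port.
  protected int getDefaultPort() {
    return 50070;
  }

  // Single point of truth for the scheme used to talk to the namenode.
  protected String getUnderlyingProtocol() {
    return "http";
  }

  URI getNamenodeUri(URI uri) throws URISyntaxException {
    int port = uri.getPort() != -1 ? uri.getPort() : getDefaultPort();
    return new URI(getUnderlyingProtocol(), null, uri.getHost(), port,
        null, null, null);
  }
}

// Secure variant: overrides only the two hooks, no duplicated URI logic.
class SecureNamenodeClientSketch extends PlainNamenodeClientSketch {
  @Override
  protected int getDefaultPort() {
    return 50470;
  }

  @Override
  protected String getUnderlyingProtocol() {
    return "https";
  }
}

With this shape hftp://nn:4000/ maps to http://nn:4000 and hsftp://nn:4000/ to
https://nn:4000, so the canonical service name and the delegation token
endpoint always agree with the scheme and port the client actually uses.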

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=1528282&r1=1528281&r2=1528282&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java Wed Oct  2 00:09:08 2013
@@ -68,6 +68,14 @@ public class HsftpFileSystem extends Hft
     return "hsftp";
   }
 
+  /**
+   * Return the underlying protocol that is used to talk to the namenode.
+   */
+  @Override
+  protected String getUnderlyingProtocol() {
+    return "https";
+  }
+
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     super.initialize(name, conf);
@@ -134,24 +142,15 @@ public class HsftpFileSystem extends Hft
 
   @Override
   protected int getDefaultPort() {
-    return getDefaultSecurePort();
+    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
+                            DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
   }
 
   @Override
-  protected InetSocketAddress getNamenodeSecureAddr(URI uri) {
-    return getNamenodeAddr(uri);
-  }
-
-  @Override
-  protected URI getNamenodeUri(URI uri) {
-    return getNamenodeSecureUri(uri);
-  }
-  
-  @Override
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
     query = addDelegationTokenParam(query);
-    final URL url = new URL("https", nnUri.getHost(), 
+    final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(),
         nnUri.getPort(), path + '?' + query);
     HttpsURLConnection conn;
     conn = (HttpsURLConnection)connectionFactory.openConnection(url);
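
With getNamenodeSecureAddr/getNamenodeSecureUri gone, hsftp's single default
port now comes straight from the https configuration keys. A quick way to see
what a given configuration yields (a hedged sketch, not part of the patch; it
assumes the hadoop-common and hadoop-hdfs jars on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

public class HsftpDefaultPortCheck {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Uncomment to mimic the custom-port unit tests further below:
    // conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
    int port = conf.getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
        DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
    System.out.println("hsftp default port: " + port);
  }
}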

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java?rev=1528282&r1=1528281&r2=1528282&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java Wed Oct  2 00:09:08 2013
@@ -57,9 +57,14 @@ public class FileChecksumServlets {
       final String hostname = host instanceof DatanodeInfo 
           ? ((DatanodeInfo)host).getHostName() : host.getIpAddr();
       final String scheme = request.getScheme();
-      final int port = "https".equals(scheme)
-          ? (Integer)getServletContext().getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY)
-          : host.getInfoPort();
+      int port = host.getInfoPort();
+      if ("https".equals(scheme)) {
+        final Integer portObject = (Integer) getServletContext().getAttribute(
+            DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
+        if (portObject != null) {
+          port = portObject;
+        }
+      }
       final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");
 
       String dtParam = "";

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=1528282&r1=1528281&r2=1528282&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Wed Oct  2 00:09:08 2013
@@ -61,9 +61,14 @@ public class FileDataServlet extends Dfs
     } else {
       hostname = host.getIpAddr();
     }
-    final int port = "https".equals(scheme)
-      ? (Integer)getServletContext().getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY)
-      : host.getInfoPort();
+    int port = host.getInfoPort();
+    if ("https".equals(scheme)) {
+      final Integer portObject = (Integer) getServletContext().getAttribute(
+          DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
+      if (portObject != null) {
+        port = portObject;
+      }
+    }
 
     String dtParam = "";
     if (dt != null) {
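
The two servlet changes fix the same latent problem: in an insecure cluster
the servlet context never carries the DFS_DATANODE_HTTPS_PORT_KEY attribute,
so unconditionally unboxing it could throw a NullPointerException as soon as a
request arrived over https. The patch uses the attribute only when it is
present and otherwise keeps the datanode info port. The same null-safe
fallback in isolation (a simplified sketch; the method name is illustrative):

// Prefer the configured https port when the attribute is set, otherwise
// fall back to the datanode's regular info port.
static int choosePort(String scheme, Integer configuredHttpsPort, int infoPort) {
  int port = infoPort;
  if ("https".equals(scheme) && configuredHttpsPort != null) {
    port = configuredHttpsPort;
  }
  return port;
}

For example choosePort("https", null, 50075) now returns 50075 instead of
failing, which is exactly the scenario HDFS-5255 describes: https enabled on
an otherwise insecure cluster.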

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java?rev=1528282&r1=1528281&r2=1528282&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java Wed Oct  2 00:09:08 2013
@@ -88,19 +88,21 @@ public class TestHftpDelegationToken {
     URI fsUri = URI.create("hftp://localhost");
     MyHftpFileSystem fs = (MyHftpFileSystem) FileSystem.newInstance(fsUri, conf);
     assertEquals(httpPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort, conf); // should still use secure port
+    checkTokenSelection(fs, httpPort, conf);
 
     // test with explicit default port
+    // Make sure it uses the port from the hftp URI.
     fsUri = URI.create("hftp://localhost:"+httpPort);
     fs = (MyHftpFileSystem) FileSystem.newInstance(fsUri, conf);
     assertEquals(httpPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort, conf); // should still use secure port
+    checkTokenSelection(fs, httpPort, conf);
     
     // test with non-default port
+    // Make sure it uses the port from the hftp URI.
     fsUri = URI.create("hftp://localhost:"+(httpPort+1));
     fs = (MyHftpFileSystem) FileSystem.newInstance(fsUri, conf);
     assertEquals(httpPort+1, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort, conf); // should still use secure port
+    checkTokenSelection(fs, httpPort + 1, conf);
     
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 5);
   }
@@ -178,7 +180,7 @@ public class TestHftpDelegationToken {
       }
       assertNotNull(ex);
       assertNotNull(ex.getCause());
-      assertEquals("Unexpected end of file from server",
+      assertEquals("Remote host closed connection during handshake",
                    ex.getCause().getMessage());
     } finally {
       t.interrupt();

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java?rev=1528282&r1=1528281&r2=1528282&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java Wed Oct  2 00:09:08 2013
@@ -294,11 +294,13 @@ public class TestHftpFileSystem {
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT, fs.getDefaultPort());
-    assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort());
 
     assertEquals(uri, fs.getUri());
+
+    // HFTP uses http to get the token so canonical service name should
+    // return the http port.
     assertEquals(
-        "127.0.0.1:"+DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT,
+        "127.0.0.1:" + DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT,
         fs.getCanonicalServiceName()
     );
   }
@@ -307,17 +309,18 @@ public class TestHftpFileSystem {
   public void testHftpCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
 
     URI uri = URI.create("hftp://localhost");
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(123, fs.getDefaultPort());
-    assertEquals(456, fs.getDefaultSecurePort());
     
     assertEquals(uri, fs.getUri());
+
+    // HFTP uses http to get the token so canonical service name should
+    // return the http port.
     assertEquals(
-        "127.0.0.1:456",
+        "127.0.0.1:123",
         fs.getCanonicalServiceName()
     );
   }
@@ -329,11 +332,10 @@ public class TestHftpFileSystem {
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT, fs.getDefaultPort());
-    assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort());
 
     assertEquals(uri, fs.getUri());
     assertEquals(
-        "127.0.0.1:"+DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT,
+        "127.0.0.1:123",
         fs.getCanonicalServiceName()
     );
   }
@@ -342,17 +344,15 @@ public class TestHftpFileSystem {
   public void testHftpCustomUriPortWithCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
 
     URI uri = URI.create("hftp://localhost:789");
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(123, fs.getDefaultPort());
-    assertEquals(456, fs.getDefaultSecurePort());
    
     assertEquals(uri, fs.getUri()); 
     assertEquals(
-        "127.0.0.1:456",
+        "127.0.0.1:789",
         fs.getCanonicalServiceName()
     );
   }
@@ -366,7 +366,6 @@ public class TestHftpFileSystem {
     HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultPort());
-    assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort());
 
     assertEquals(uri, fs.getUri());
     assertEquals(
@@ -385,7 +384,6 @@ public class TestHftpFileSystem {
     HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(456, fs.getDefaultPort());
-    assertEquals(456, fs.getDefaultSecurePort());
     
     assertEquals(uri, fs.getUri());
     assertEquals(
@@ -401,7 +399,6 @@ public class TestHftpFileSystem {
     HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultPort());
-    assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort());
 
     assertEquals(uri, fs.getUri());
     assertEquals(
@@ -420,7 +417,6 @@ public class TestHftpFileSystem {
     HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);
 
     assertEquals(456, fs.getDefaultPort());
-    assertEquals(456, fs.getDefaultSecurePort());
 
     assertEquals(uri, fs.getUri());
     assertEquals(