Posted to common-commits@hadoop.apache.org by cd...@apache.org on 2008/11/08 00:37:17 UTC

svn commit: r712309 - in /hadoop/core/trunk: ./ conf/ src/core/org/apache/hadoop/http/ src/hdfs/org/apache/hadoop/hdfs/ src/hdfs/org/apache/hadoop/hdfs/server/datanode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/tools/org/apache/hadoop/tools/

Author: cdouglas
Date: Fri Nov  7 15:37:16 2008
New Revision: 712309

URL: http://svn.apache.org/viewvc?rev=712309&view=rev
Log:
HADOOP-4453. Improve ssl configuration and handling in HsftpFileSystem,
particularly when used with DistCp. Contributed by Kan Zhang.

Added:
    hadoop/core/trunk/conf/ssl-client.xml.example
    hadoop/core/trunk/conf/ssl-server.xml.example
Removed:
    hadoop/core/trunk/conf/sslinfo.xml.example
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/conf/hadoop-default.xml
    hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=712309&r1=712308&r2=712309&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri Nov  7 15:37:16 2008
@@ -67,6 +67,9 @@
     HADOOP-4187. Does a runtime lookup for JobConf/JobConfigurable, and if found,
     invokes the appropriate configure method. (Sharad Agarwal via ddas)
 
+    HADOOP-4453. Improve ssl configuration and handling in HsftpFileSystem,
+    particularly when used with DistCp. (Kan Zhang via cdouglas)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/core/trunk/conf/hadoop-default.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/conf/hadoop-default.xml?rev=712309&r1=712308&r2=712309&view=diff
==============================================================================
--- hadoop/core/trunk/conf/hadoop-default.xml (original)
+++ hadoop/core/trunk/conf/hadoop-default.xml Fri Nov  7 15:37:16 2008
@@ -319,23 +319,45 @@
 </property>
 
 <property>
-  <name>dfs.datanode.https.address</name>
-  <value>0.0.0.0:50475</value>
+  <name>dfs.https.enable</name>
+  <value>false</value>
+  <description>Decide if HTTPS(SSL) is supported on HDFS
+  </description>
 </property>
 
 <property>
-  <name>dfs.https.address</name>
-  <value>0.0.0.0:50470</value>
+  <name>dfs.https.need.client.auth</name>
+  <value>false</value>
+  <description>Whether SSL client certificate authentication is required
+  </description>
+</property>
+
+<property>
+  <name>dfs.https.server.keystore.resource</name>
+  <value>ssl-server.xml</value>
+  <description>Resource file from which ssl server keystore
+  information will be extracted
+  </description>
 </property>
 
 <property>
-  <name>https.keystore.info.rsrc</name>
-  <value>sslinfo.xml</value>
-  <description>The name of the resource from which ssl keystore information
-  will be extracted
+  <name>dfs.https.client.keystore.resource</name>
+  <value>ssl-client.xml</value>
+  <description>Resource file from which ssl client keystore
+  information will be extracted
   </description>
 </property>
 
+<property>
+  <name>dfs.datanode.https.address</name>
+  <value>0.0.0.0:50475</value>
+</property>
+
+<property>
+  <name>dfs.https.address</name>
+  <value>0.0.0.0:50470</value>
+</property>
+
  <property>
   <name>dfs.datanode.dns.interface</name>
   <value>default</value>
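
The keys introduced above are normally set in hadoop-site.xml on the NameNode and DataNodes. As a rough illustration only (the class name and values below are placeholders, not part of this commit), the same settings expressed through the Configuration API look like this:

    import org.apache.hadoop.conf.Configuration;

    public class EnableHdfsHttps {
      /** Apply the HTTPS-related keys introduced by HADOOP-4453. */
      public static Configuration withHttps(Configuration conf) {
        // Master switch: serve HTTPS(SSL) from the NameNode and DataNodes.
        conf.setBoolean("dfs.https.enable", true);
        // Whether clients must present a certificate (mutual auth).
        conf.setBoolean("dfs.https.need.client.auth", false);
        // Resource file the servers read keystore/truststore info from.
        conf.set("dfs.https.server.keystore.resource", "ssl-server.xml");
        // Bind addresses for the NameNode and DataNode HTTPS listeners.
        conf.set("dfs.https.address", "0.0.0.0:50470");
        conf.set("dfs.datanode.https.address", "0.0.0.0:50475");
        return conf;
      }
    }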

Added: hadoop/core/trunk/conf/ssl-client.xml.example
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/conf/ssl-client.xml.example?rev=712309&view=auto
==============================================================================
--- hadoop/core/trunk/conf/ssl-client.xml.example (added)
+++ hadoop/core/trunk/conf/ssl-client.xml.example Fri Nov  7 15:37:16 2008
@@ -0,0 +1,57 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+<property>
+  <name>ssl.client.truststore.location</name>
+  <value></value>
+  <description>Truststore to be used by clients like distcp. Must be
+  specified.
+  </description>
+</property>
+
+<property>
+  <name>ssl.client.truststore.password</name>
+  <value></value>
+  <description>Optional. Default value is "".
+  </description>
+</property>
+
+<property>
+  <name>ssl.client.truststore.type</name>
+  <value>jks</value>
+  <description>Optional. Default value is "jks".
+  </description>
+</property>
+
+<property>
+  <name>ssl.client.keystore.location</name>
+  <value></value>
+  <description>Keystore to be used by clients like distcp. Must be
+  specified.
+  </description>
+</property>
+
+<property>
+  <name>ssl.client.keystore.password</name>
+  <value></value>
+  <description>Optional. Default value is "".
+  </description>
+</property>
+
+<property>
+  <name>ssl.client.keystore.keypassword</name>
+  <value></value>
+  <description>Optional. Default value is "".
+  </description>
+</property>
+
+<property>
+  <name>ssl.client.keystore.type</name>
+  <value>jks</value>
+  <description>Optional. Default value is "jks".
+  </description>
+</property>
+
+</configuration>
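
The HsftpFileSystem change later in this commit loads this file by name as a plain Configuration resource. A minimal sketch of that lookup (assuming ssl-client.xml is on the classpath, e.g. in the conf/ directory; the class name is a placeholder):

    import org.apache.hadoop.conf.Configuration;

    public class SslClientConfSketch {
      public static void main(String[] args) {
        // Load only ssl-client.xml, without hadoop-default/hadoop-site defaults.
        Configuration sslConf = new Configuration(false);
        sslConf.addResource("ssl-client.xml");
        // Required entry; the empty default mirrors the example file above.
        String truststore = sslConf.get("ssl.client.truststore.location", "");
        // Optional entries fall back to the documented defaults.
        String type = sslConf.get("ssl.client.truststore.type", "jks");
        System.out.println(truststore + " (" + type + ")");
      }
    }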

Added: hadoop/core/trunk/conf/ssl-server.xml.example
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/conf/ssl-server.xml.example?rev=712309&view=auto
==============================================================================
--- hadoop/core/trunk/conf/ssl-server.xml.example (added)
+++ hadoop/core/trunk/conf/ssl-server.xml.example Fri Nov  7 15:37:16 2008
@@ -0,0 +1,55 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+<property>
+  <name>ssl.server.truststore.location</name>
+  <value></value>
+  <description>Truststore to be used by NN and DN. Must be specified.
+  </description>
+</property>
+
+<property>
+  <name>ssl.server.truststore.password</name>
+  <value></value>
+  <description>Optional. Default value is "".
+  </description>
+</property>
+
+<property>
+  <name>ssl.server.truststore.type</name>
+  <value>jks</value>
+  <description>Optional. Default value is "jks".
+  </description>
+</property>
+
+<property>
+  <name>ssl.server.keystore.location</name>
+  <value></value>
+  <description>Keystore to be used by NN and DN. Must be specified.
+  </description>
+</property>
+
+<property>
+  <name>ssl.server.keystore.password</name>
+  <value></value>
+  <description>Must be specified.
+  </description>
+</property>
+
+<property>
+  <name>ssl.server.keystore.keypassword</name>
+  <value></value>
+  <description>Must be specified.
+  </description>
+</property>
+
+<property>
+  <name>ssl.server.keystore.type</name>
+  <value>jks</value>
+  <description>Optional. Default value is "jks".
+  </description>
+</property>
+
+</configuration>
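
Unlike the client file, several entries here are required. A hypothetical helper, not part of this commit, that fails fast when a required ssl-server.xml entry is left empty:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;

    public class SslServerConfCheck {
      /** Throw if any entry marked "Must be specified" above is missing. */
      public static void check(Configuration sslConf) throws IOException {
        String[] required = {
          "ssl.server.truststore.location",
          "ssl.server.keystore.location",
          "ssl.server.keystore.password",
          "ssl.server.keystore.keypassword"
        };
        for (String key : required) {
          if (sslConf.get(key, "").length() == 0) {
            throw new IOException("Missing required entry in ssl-server.xml: " + key);
          }
        }
      }
    }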

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java?rev=712309&r1=712308&r2=712309&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java Fri Nov  7 15:37:16 2008
@@ -309,7 +309,9 @@
    * @param keystore location of the keystore
    * @param storPass password for the keystore
    * @param keyPass password for the key
+   * @deprecated Use {@link #addSslListener(InetSocketAddress, Configuration, boolean)}
    */
+  @Deprecated
   public void addSslListener(InetSocketAddress addr, String keystore,
       String storPass, String keyPass) throws IOException {
     if (sslListener != null || webServer.isStarted()) {
@@ -325,6 +327,37 @@
   }
 
   /**
+   * Configure an ssl listener on the server.
+   * @param addr address to listen on
+   * @param sslConf conf to retrieve ssl options
+   * @param needClientAuth whether client authentication is required
+   */
+  public void addSslListener(InetSocketAddress addr, Configuration sslConf,
+      boolean needClientAuth) throws IOException {
+    if (sslListener != null || webServer.isStarted()) {
+      throw new IOException("Failed to add ssl listener");
+    }
+    if (needClientAuth) {
+      // setting up SSL truststore for authenticating clients
+      System.setProperty("javax.net.ssl.trustStore", sslConf.get(
+          "ssl.server.truststore.location", ""));
+      System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
+          "ssl.server.truststore.password", ""));
+      System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
+          "ssl.server.truststore.type", "jks"));
+    }
+    sslListener = new SslListener();
+    sslListener.setHost(addr.getHostName());
+    sslListener.setPort(addr.getPort());
+    sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
+    sslListener.setPassword(sslConf.get("ssl.server.keystore.password", ""));
+    sslListener.setKeyPassword(sslConf.get("ssl.server.keystore.keypassword", ""));
+    sslListener.setKeystoreType(sslConf.get("ssl.server.keystore.type", "jks"));
+    sslListener.setNeedClientAuth(needClientAuth);
+    webServer.addListener(sslListener);
+  }
+
+  /**
    * Start the server. Does not wait for the server to start.
    */
   public void start() throws IOException {
@@ -385,4 +418,4 @@
       ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);      
     }
   }
-}
\ No newline at end of file
+}
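
A condensed sketch of how the DataNode and FSNamesystem changes below call this new overload (service name, host, and port are placeholders): load ssl-server.xml into a standalone Configuration and pass it in together with the client-auth flag.

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer;
    import org.apache.hadoop.net.NetUtils;

    public class SslListenerSketch {
      public static void startWithSsl(Configuration conf) throws Exception {
        HttpServer infoServer = new HttpServer("hdfs", "0.0.0.0", 50070, false, conf);
        if (conf.getBoolean("dfs.https.enable", false)) {
          boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
          InetSocketAddress secAddr = NetUtils.createSocketAddr(
              conf.get("dfs.https.address", "0.0.0.0:50470"));
          // ssl-server.xml is read on its own, without the usual default resources.
          Configuration sslConf = new Configuration(false);
          sslConf.addResource(conf.get("dfs.https.server.keystore.resource", "ssl-server.xml"));
          infoServer.addSslListener(secAddr, sslConf, needClientAuth);
        }
        infoServer.start();
      }
    }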

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=712309&r1=712308&r2=712309&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java Fri Nov  7 15:37:16 2008
@@ -19,14 +19,16 @@
 package org.apache.hadoop.hdfs;
 
 import java.io.IOException;
-
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.UnknownHostException;
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSession;
 
-import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
 
 /** An implementation of a protocol for accessing filesystems over HTTPS.
  * The following implementation provides a limited, read-only interface
@@ -37,12 +39,42 @@
 public class HsftpFileSystem extends HftpFileSystem {
 
   @Override
+  public void initialize(URI name, Configuration conf) throws IOException {
+    super.initialize(name, conf);
+    setupSsl(conf);
+  }
+
+  /** Set up SSL resources */
+  private static void setupSsl(Configuration conf) {
+    Configuration sslConf = new Configuration(false);
+    sslConf.addResource(conf.get("dfs.https.client.keystore.resource",
+        "ssl-client.xml"));
+    System.setProperty("javax.net.ssl.trustStore", sslConf.get(
+        "ssl.client.truststore.location", ""));
+    System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
+        "ssl.client.truststore.password", ""));
+    System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
+        "ssl.client.truststore.type", "jks"));
+    System.setProperty("javax.net.ssl.keyStore", sslConf.get(
+        "ssl.client.keystore.location", ""));
+    System.setProperty("javax.net.ssl.keyStorePassword", sslConf.get(
+        "ssl.client.keystore.password", ""));
+    System.setProperty("javax.net.ssl.keyPassword", sslConf.get(
+        "ssl.client.keystore.keypassword", ""));
+    System.setProperty("javax.net.ssl.keyStoreType", sslConf.get(
+        "ssl.client.keystore.type", "jks"));
+  }
+
+  @Override
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
     try {
       final URL url = new URI("https", null, pickOneAddress(nnAddr.getHostName()),
           nnAddr.getPort(), path, query, null).toURL();
-      return (HttpURLConnection)url.openConnection();
+      HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
+      // bypass hostname verification
+      conn.setHostnameVerifier(new DummyHostnameVerifier());
+      return (HttpURLConnection)conn;
     } catch (URISyntaxException e) {
       throw (IOException)new IOException().initCause(e);
     }
@@ -60,4 +92,13 @@
     }
   }
 
+  /**
+   * Dummy hostname verifier that is used to bypass hostname checking
+   */
+  protected static class DummyHostnameVerifier implements HostnameVerifier {
+    public boolean verify(String hostname, SSLSession session) {
+      return true;
+    }
+  }
+
 }
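
With the ssl setup above wired into initialize(), a client only needs to point dfs.https.client.keystore.resource at its ssl-client.xml and use an hsftp:// URI. A minimal read sketch (hostname, port, path, and class name are placeholders):

    import java.io.InputStream;
    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HsftpReadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Same key DistCp sets for its map tasks via -mapredSslConf.
        conf.set("dfs.https.client.keystore.resource", "ssl-client.xml");
        // 50470 is the default dfs.https.address port on the NameNode.
        FileSystem fs = FileSystem.get(
            URI.create("hsftp://namenode.example.com:50470/"), conf);
        InputStream in = fs.open(new Path("/user/foo/part-00000"));
        in.close();
      }
    }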

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=712309&r1=712308&r2=712309&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java Fri Nov  7 15:37:16 2008
@@ -346,15 +346,14 @@
     int tmpInfoPort = infoSocAddr.getPort();
     this.infoServer = new HttpServer("datanode", infoHost, tmpInfoPort,
         tmpInfoPort == 0, conf);
-    InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(
-        conf.get("dfs.datanode.https.address", infoHost + ":" + 0));
-    Configuration sslConf = new Configuration(conf);
-    sslConf.addResource(conf.get("https.keystore.info.rsrc", "sslinfo.xml"));
-    String keyloc = sslConf.get("https.keystore.location");
-    if (null != keyloc) {
-      this.infoServer.addSslListener(secInfoSocAddr, keyloc,
-          sslConf.get("https.keystore.password", ""),
-          sslConf.get("https.keystore.keypassword", ""));
+    if (conf.getBoolean("dfs.https.enable", false)) {
+      boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
+      InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
+          "dfs.datanode.https.address", infoHost + ":" + 0));
+      Configuration sslConf = new Configuration(false);
+      sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+          "ssl-server.xml"));
+      this.infoServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
     }
     this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);
     this.infoServer.addInternalServlet(null, "/getFileChecksum/*",

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=712309&r1=712308&r2=712309&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Fri Nov  7 15:37:16 2008
@@ -352,21 +352,20 @@
     int tmpInfoPort = infoSocAddr.getPort();
     this.infoServer = new HttpServer("hdfs", infoHost, tmpInfoPort, 
         tmpInfoPort == 0, conf);
-    InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(
-        conf.get("dfs.https.address", infoHost + ":" + 0));
-    Configuration sslConf = new Configuration(conf);
-    sslConf.addResource(conf.get("https.keystore.info.rsrc", "sslinfo.xml"));
-    String keyloc = sslConf.get("https.keystore.location");
-    if (null != keyloc) {
-      this.infoServer.addSslListener(secInfoSocAddr, keyloc,
-          sslConf.get("https.keystore.password", ""),
-          sslConf.get("https.keystore.keypassword", ""));
-    }
-    // assume same ssl port for all datanodes
-    InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(
-        conf.get("dfs.datanode.https.address", infoHost + ":" + 50475));
-    this.infoServer.setAttribute("datanode.https.port",
-        datanodeSslPort.getPort());
+    if (conf.getBoolean("dfs.https.enable", false)) {
+      boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
+      InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
+          "dfs.https.address", infoHost + ":" + 0));
+      Configuration sslConf = new Configuration(false);
+      sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+          "ssl-server.xml"));
+      this.infoServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
+      // assume same ssl port for all datanodes
+      InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
+          "dfs.datanode.https.address", infoHost + ":" + 50475));
+      this.infoServer.setAttribute("datanode.https.port", datanodeSslPort
+          .getPort());
+    }
     this.infoServer.setAttribute("name.node", nn);
     this.infoServer.setAttribute("name.node.address", nn.getNameNodeAddress());
     this.infoServer.setAttribute("name.system.image", getFSImage());

Modified: hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java?rev=712309&r1=712308&r2=712309&view=diff
==============================================================================
--- hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java Fri Nov  7 15:37:16 2008
@@ -95,6 +95,7 @@
     "\n-filelimit <n>         Limit the total number of files to be <= n" +
     "\n-sizelimit <n>         Limit the total size to be <= n bytes" +
     "\n-delete                Delete the files existing in the dst but not in src" +
+    "\n-mapredSslConf <f>     Filename of SSL configuration for mapper task" +
     
     "\n\nNOTE 1: if -overwrite or -update are set, each source URI is " +
     "\n      interpreted as an isomorphic update to an existing directory." +
@@ -608,7 +609,7 @@
 
     final Path dst = new Path(destPath);
     copy(conf, new Arguments(tmp, dst, logPath, flags, null,
-        Long.MAX_VALUE, Long.MAX_VALUE));
+        Long.MAX_VALUE, Long.MAX_VALUE, null));
   }
 
   /** Sanity check for srcPath */
@@ -640,6 +641,9 @@
     if (args.preservedAttributes != null) {
       job.set(PRESERVE_STATUS_LABEL, args.preservedAttributes);
     }
+    if (args.mapredSslConf != null) {
+      job.set("dfs.https.client.keystore.resource", args.mapredSslConf);
+    }
     
     //Initialize the mapper
     try {
@@ -714,6 +718,7 @@
     final String preservedAttributes;
     final long filelimit;
     final long sizelimit;
+    final String mapredSslConf;
     
     /**
      * Arguments for distcp
@@ -727,7 +732,7 @@
      */
     Arguments(List<Path> srcs, Path dst, Path log,
         EnumSet<Options> flags, String preservedAttributes,
-        long filelimit, long sizelimit) {
+        long filelimit, long sizelimit, String mapredSslConf) {
       this.srcs = srcs;
       this.dst = dst;
       this.log = log;
@@ -735,6 +740,7 @@
       this.preservedAttributes = preservedAttributes;
       this.filelimit = filelimit;
       this.sizelimit = sizelimit;
+      this.mapredSslConf = mapredSslConf;
       
       if (LOG.isTraceEnabled()) {
         LOG.trace("this = " + this);
@@ -748,6 +754,7 @@
       Path log = null;
       EnumSet<Options> flags = EnumSet.noneOf(Options.class);
       String presevedAttributes = null;
+      String mapredSslConf = null;
       long filelimit = Long.MAX_VALUE;
       long sizelimit = Long.MAX_VALUE;
 
@@ -778,6 +785,11 @@
             throw new IllegalArgumentException("logdir not specified in -log");
           }
           log = new Path(args[idx]);
+        } else if ("-mapredSslConf".equals(args[idx])) {
+          if (++idx ==  args.length) {
+            throw new IllegalArgumentException("ssl conf file not specified in -mapredSslConf");
+          }
+          mapredSslConf = args[idx];
         } else if ("-m".equals(args[idx])) {
           if (++idx == args.length) {
             throw new IllegalArgumentException("num_maps not specified in -m");
@@ -814,7 +826,7 @@
             + Options.UPDATE + ".");
       }
       return new Arguments(srcs, dst, log, flags, presevedAttributes,
-          filelimit, sizelimit);
+          filelimit, sizelimit, mapredSslConf);
     }
     
     /** {@inheritDoc} */
@@ -827,6 +839,7 @@
           + "\n  preservedAttributes = " + preservedAttributes 
           + "\n  filelimit = " + filelimit 
           + "\n  sizelimit = " + sizelimit
+          + "\n  mapredSslConf = " + mapredSslConf
           + "\n}"; 
     }
   }
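
Putting the new option together: a sketch of driving DistCp over hsftp with -mapredSslConf. Source and destination URIs and the conf file name are placeholders, and it assumes DistCp's Tool constructor that takes a Configuration; the named file must be visible to the map tasks (e.g. in the job's conf directory).

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.DistCp;
    import org.apache.hadoop.util.ToolRunner;

    public class DistCpOverHsftp {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] argv = {
            // Propagated to map tasks as dfs.https.client.keystore.resource.
            "-mapredSslConf", "ssl-client.xml",
            "hsftp://source-nn.example.com:50470/user/foo",
            "hdfs://dest-nn.example.com:9000/user/foo"
        };
        System.exit(ToolRunner.run(conf, new DistCp(conf), argv));
      }
    }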