Posted to hdfs-commits@hadoop.apache.org by su...@apache.org on 2011/06/03 01:16:31 UTC

svn commit: r1130870 - in /hadoop/hdfs/trunk: ./ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/server/namenode/ src/java/org/apache/hadoop/hdfs/tools/ src/test/hdfs/org/apache/hadoop/hdfs/

Author: suresh
Date: Thu Jun  2 23:16:31 2011
New Revision: 1130870

URL: http://svn.apache.org/viewvc?rev=1130870&view=rev
Log:
HDFS-1986. Add option to get http/https address from DFSUtil#getInfoServer(). Contributed by Tanping Wang.
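
In short, DFSUtil#getInfoServer gains a third boolean parameter, httpsAddress: when it is true and security is enabled, the method returns the configured https address; otherwise it returns the http address. A minimal sketch of the new call shape (the class and setup below are illustrative, not part of this commit):

    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    public class InfoServerExample {            // hypothetical caller, for illustration only
      public static void main(String[] args) {
        Configuration conf = new HdfsConfiguration();
        InetSocketAddress namenode = null;      // null falls back to the configured/default address keys

        // Third argument selects https (true) or http (false). The https address is
        // only returned when security is enabled; otherwise the http address is used.
        String httpsAddr = DFSUtil.getInfoServer(namenode, conf, true);
        String httpAddr = DFSUtil.getInfoServer(namenode, conf, false);

        System.out.println("https info server: " + httpsAddr);
        System.out.println("http info server: " + httpAddr);
      }
    }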


Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUtil.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=1130870&r1=1130869&r2=1130870&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Thu Jun  2 23:16:31 2011
@@ -476,6 +476,9 @@ Trunk (unreleased changes)
     HDFS-1968.  Enhance TestWriteRead to support position/sequential read,
     append, truncate and verbose options.  (CW Chung via szetszwo)
 
+    HDFS-1986. Add option to get http/https address from 
+    DFSUtil#getInfoServer(). (Tanping via suresh)
+
   OPTIMIZATIONS
 
     HDFS-1458. Improve checkpoint performance by avoiding unnecessary image

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1130870&r1=1130869&r2=1130870&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java Thu Jun  2 23:16:31 2011
@@ -444,22 +444,23 @@ public class DFSUtil {
   }
 
   /**
-   * return HTTP server info from the configuration
+   * return server http or https address from the configuration
    * @param conf
    * @param namenode - namenode address
-   * @return http server info
+   * @param httpsAddress -If true, and if security is enabled, returns server 
+   *                      https address. If false, returns server http address.
+   * @return server http or https address
    */
   public static String getInfoServer(
-      InetSocketAddress namenode, Configuration conf) {
+      InetSocketAddress namenode, Configuration conf, boolean httpsAddress) {
     String httpAddress = null;
     
-    String httpAddressKey = UserGroupInformation.isSecurityEnabled() ?
-        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY
+    String httpAddressKey = (UserGroupInformation.isSecurityEnabled() 
+        && httpsAddress) ? DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY
         : DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY;
-    String httpAddressDefault = UserGroupInformation.isSecurityEnabled() ?
-        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT 
-        :DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT;
-    
+    String httpAddressDefault = (UserGroupInformation.isSecurityEnabled() 
+        && httpsAddress) ? DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT
+        : DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT;
     if(namenode != null) {
       // if non-default namenode, try reverse look up 
       // the nameServiceID if it is available
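
Read without the diff markers, the updated key selection in getInfoServer reduces to roughly the following (a sketch of just this fragment; the useHttps local is introduced here for readability and does not appear in the committed code):

    // Use the https key/default only when security is enabled AND the caller asked for https.
    boolean useHttps = UserGroupInformation.isSecurityEnabled() && httpsAddress;
    String httpAddressKey = useHttps
        ? DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY
        : DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY;
    String httpAddressDefault = useHttps
        ? DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT
        : DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT;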

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java?rev=1130870&r1=1130869&r2=1130870&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java Thu Jun  2 23:16:31 2011
@@ -374,7 +374,7 @@ class ClusterJspHelper {
       nn.blocksCount = mxbeanProxy.getTotalBlocks();
       nn.missingBlocksCount = mxbeanProxy.getNumberOfMissingBlocks();
       nn.free = mxbeanProxy.getFree();
-      nn.httpAddress = DFSUtil.getInfoServer(rpcAddress, conf);
+      nn.httpAddress = DFSUtil.getInfoServer(rpcAddress, conf, false);
       getLiveNodeCount(mxbeanProxy.getLiveNodes(), nn);
       getDeadNodeCount(mxbeanProxy.getDeadNodes(), nn);
       return nn;

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1130870&r1=1130869&r2=1130870&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Thu Jun  2 23:16:31 2011
@@ -412,7 +412,7 @@ public class SecondaryNameNode implement
       throw new IOException("This is not a DFS");
     }
 
-    String configuredAddress = DFSUtil.getInfoServer(null, conf);
+    String configuredAddress = DFSUtil.getInfoServer(null, conf, true);
     InetSocketAddress sockAddr = NetUtils.createSocketAddr(configuredAddress);
     if (sockAddr.getAddress().isAnyLocalAddress()) {
       if(UserGroupInformation.isSecurityEnabled()) {

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java?rev=1130870&r1=1130869&r2=1130870&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java Thu Jun  2 23:16:31 2011
@@ -229,7 +229,7 @@ public class DFSck extends Configured im
     InetSocketAddress namenode = 
       NameNode.getAddress(dfs.getUri().getAuthority());
     
-    return DFSUtil.getInfoServer(namenode, conf);
+    return DFSUtil.getInfoServer(namenode, conf, true);
   }
 
   private int doWork(final String[] args) throws IOException {

Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUtil.java?rev=1130870&r1=1130869&r2=1130870&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUtil.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUtil.java Thu Jun  2 23:16:31 2011
@@ -29,14 +29,19 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
 
+import junit.framework.Assert;
+
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
+
 
 public class TestDFSUtil {
   /**
@@ -233,4 +238,16 @@ public class TestDFSUtil {
     } catch (IOException expected) {
     }
   }
+  
+  @Test
+  public void testGetServerInfo(){
+    HdfsConfiguration conf = new HdfsConfiguration();
+    conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+    UserGroupInformation.setConfiguration(conf);
+    String httpsport = DFSUtil.getInfoServer(null, conf, true);
+    Assert.assertEquals("0.0.0.0:50470", httpsport);
+    String httpport = DFSUtil.getInfoServer(null, conf, false);
+    Assert.assertEquals("0.0.0.0:50070", httpport);
+  }
+
 }
\ No newline at end of file
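
The expected values in the new test match the configuration defaults referenced above: with no explicit addresses set and kerberos authentication enabled, getInfoServer(null, conf, true) falls back to DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT (0.0.0.0:50470), and getInfoServer(null, conf, false) falls back to DFS_NAMENODE_HTTP_ADDRESS_DEFAULT (0.0.0.0:50070).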