Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2011/08/23 00:59:18 UTC

svn commit: r1160489 - in /hadoop/common/branches/branch-0.20-security: CHANGES.txt src/contrib/hdfsproxy/build.xml src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java

Author: jitendra
Date: Mon Aug 22 22:59:18 2011
New Revision: 1160489

URL: http://svn.apache.org/viewvc?rev=1160489&view=rev
Log:
HDFS-1164. TestHdfsProxy is failing. Contributed by Todd Lipcon.

Modified:
    hadoop/common/branches/branch-0.20-security/CHANGES.txt
    hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/build.xml
    hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java

Modified: hadoop/common/branches/branch-0.20-security/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/CHANGES.txt?rev=1160489&r1=1160488&r2=1160489&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security/CHANGES.txt Mon Aug 22 22:59:18 2011
@@ -91,6 +91,8 @@ Release 0.20.205.0 - unreleased
     HDFS-1211. Block receiver should not log "rewind" packets at INFO level.
     (Todd Lipcon)
 
+    HDFS-1164. TestHdfsProxy is failing. (Todd Lipcon)
+
 Release 0.20.204.0 - unreleased
 
   NEW FEATURES

Modified: hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/build.xml?rev=1160489&r1=1160488&r2=1160489&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/build.xml (original)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/build.xml Mon Aug 22 22:59:18 2011
@@ -156,6 +156,8 @@
         <include name="slf4j-api-${slf4j-api.version}.jar"/>
         <include name="slf4j-log4j12-${slf4j-log4j12.version}.jar"/>
         <include name="xmlenc-${xmlenc.version}.jar"/>
+	<include name="jetty-${jetty.version}.jar"/>
+	<include name="jetty-util-${jetty-util.version}.jar"/>
       </lib>
       <lib dir="${hadoop.root}/lib">
         <include name="hadoop-core-*.jar"/>

Modified: hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java?rev=1160489&r1=1160488&r2=1160489&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java Mon Aug 22 22:59:18 2011
@@ -41,6 +41,7 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 
 /**
@@ -199,13 +200,16 @@ public class TestHdfsProxy extends TestC
     MiniDFSCluster cluster = null;
     HdfsProxy proxy = null;
     try {
+      final UserGroupInformation CLIENT_UGI = UserGroupInformation.getCurrentUser();
+      final String testUser = CLIENT_UGI.getShortUserName();
+      final String testGroup = CLIENT_UGI.getGroupNames()[0];
 
       final Configuration dfsConf = new Configuration();
-      dfsConf.set("hadoop.proxyuser." + System.getProperty("user.name") +
-          ".groups", "users");
-      dfsConf.set("hadoop.proxyuser.users.hosts", "127.0.0.1,localhost");
-      dfsConf.set("hadoop.proxyuser." + System.getProperty("user.name") +
-          ".hosts", "127.0.0.1,localhost");
+      dfsConf.set("hadoop.proxyuser." + testUser + ".groups", testGroup);
+      dfsConf.set("hadoop.proxyuser." + testGroup + ".hosts",
+          "127.0.0.1,localhost");
+      dfsConf.set("hadoop.proxyuser." + testUser + ".hosts",
+          "127.0.0.1,localhost");
       dfsConf.set("hadoop.security.authentication", "simple");
       
       //make sure server will look at the right config
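
For context, the test change above replaces the hard-coded "users" proxy group (and the user name taken from the "user.name" system property) with values derived from the running user's UserGroupInformation, so the proxy-user whitelist matches whatever account actually runs the test. A minimal sketch of that pattern is below; it mirrors the three configuration keys set in the patch, but the class name ProxyUserConfSketch and the standalone main() are illustrative only and are not part of the commit.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

/**
 * Sketch of the proxy-user setup used by the patched TestHdfsProxy:
 * derive the user and group from the current UGI instead of hard-coding
 * the "users" group, so the test passes regardless of the local
 * account's group membership.
 */
public class ProxyUserConfSketch {

  public static Configuration buildProxyUserConf() throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    String testUser = ugi.getShortUserName();
    String testGroup = ugi.getGroupNames()[0];

    Configuration conf = new Configuration();
    // Allow testUser to impersonate members of its own primary group.
    conf.set("hadoop.proxyuser." + testUser + ".groups", testGroup);
    // The patch also keeps a hosts entry keyed by the group name,
    // mirroring the original "hadoop.proxyuser.users.hosts" setting.
    conf.set("hadoop.proxyuser." + testGroup + ".hosts",
        "127.0.0.1,localhost");
    // Restrict impersonation by testUser to the local host.
    conf.set("hadoop.proxyuser." + testUser + ".hosts",
        "127.0.0.1,localhost");
    conf.set("hadoop.security.authentication", "simple");
    return conf;
  }

  public static void main(String[] args) throws IOException {
    Configuration conf = buildProxyUserConf();
    String user = UserGroupInformation.getCurrentUser().getShortUserName();
    // Prints the group the current user is allowed to impersonate.
    System.out.println(conf.get("hadoop.proxyuser." + user + ".groups"));
  }
}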