Posted to common-commits@hadoop.apache.org by aw...@apache.org on 2015/12/11 19:59:56 UTC

hadoop git commit: Revert "HDFS-9525. hadoop utilities need to support provided delegation tokens (HeeSoo Kim via aw)"

Repository: hadoop
Updated Branches:
  refs/heads/trunk b7b292367 -> 576b569b6


Revert "HDFS-9525. hadoop utilities need to support provided delegation tokens (HeeSoo Kim via aw)"

This reverts commit 832b3cbde1c2f77b04c93188e3a94420974090cf.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/576b569b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/576b569b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/576b569b

Branch: refs/heads/trunk
Commit: 576b569b6c97bd5f57e52efdabdf8c2fa996a524
Parents: b7b2923
Author: Allen Wittenauer <aw...@apache.org>
Authored: Fri Dec 11 10:59:42 2015 -0800
Committer: Allen Wittenauer <aw...@apache.org>
Committed: Fri Dec 11 10:59:42 2015 -0800

----------------------------------------------------------------------
 .../fs/CommonConfigurationKeysPublic.java       |  3 -
 .../hadoop/security/UserGroupInformation.java   | 27 +-------
 .../src/main/resources/core-default.xml         |  6 --
 .../security/TestUserGroupInformation.java      | 46 +------------
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      | 70 ++++++--------------
 .../hdfs/web/resources/DelegationParam.java     |  6 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 -
 .../web/resources/NamenodeWebHdfsMethods.java   |  4 ++
 .../hadoop/hdfs/web/TestWebHdfsTokens.java      | 69 ++++---------------
 .../apache/hadoop/hdfs/web/TestWebHdfsUrl.java  | 33 ++-------
 10 files changed, 49 insertions(+), 218 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 648ad59..c9f758b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -311,9 +311,6 @@ public class CommonConfigurationKeysPublic {
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String HADOOP_SECURITY_DNS_NAMESERVER_KEY =
     "hadoop.security.dns.nameserver";
-  /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
-  public static final String HADOOP_TOKEN_FILES =
-      "hadoop.token.files";
 
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
index a9871a5..483420c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.security;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKEN_FILES;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
@@ -249,9 +248,9 @@ public class UserGroupInformation {
   
   /**Environment variable pointing to the token cache file*/
   public static final String HADOOP_TOKEN_FILE_LOCATION = 
-      "HADOOP_TOKEN_FILE_LOCATION";
-
-  /**
+    "HADOOP_TOKEN_FILE_LOCATION";
+  
+  /** 
    * A method to initialize the fields that depend on a configuration.
    * Must be called before useKerberos or groups is used.
    */
@@ -822,26 +821,6 @@ public class UserGroupInformation {
       }
       loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, realUser);
 
-      String tokenFileLocation = System.getProperty(HADOOP_TOKEN_FILES);
-      if (tokenFileLocation == null) {
-        tokenFileLocation = conf.get(HADOOP_TOKEN_FILES);
-      }
-      if (tokenFileLocation != null) {
-        String[] tokenFileNames = tokenFileLocation.split("\\s*,\\s*+");
-        for (String tokenFileName: tokenFileNames) {
-          if (tokenFileName.length() > 0) {
-            File tokenFile = new File(tokenFileName);
-            if (tokenFile.exists() && tokenFile.isFile()) {
-              Credentials cred = Credentials.readTokenStorageFile(
-                  tokenFile, conf);
-              loginUser.addCredentials(cred);
-            } else {
-              LOG.info("tokenFile("+tokenFileName+") does not exist");
-            }
-          }
-        }
-      }
-
       String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
       if (fileLocation != null) {
         // Load the token storage file and put all of the tokens into the

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 4b69a65..c8f7f71 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -466,12 +466,6 @@ for ldap providers in the same way as above does.
   <description>Maps kerberos principals to local user names</description>
 </property>
 
-<property>
-  <name>hadoop.token.files</name>
-  <value></value>
-  <description>List of token cache files that have delegation tokens for hadoop service</description>
-</property>
-
 <!-- i/o properties -->
 <property>
   <name>io.file.buffer.size</name>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
index 0539a03..54cfc2d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.security;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -36,7 +35,6 @@ import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.LoginContext;
 
 import java.io.BufferedReader;
-import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.lang.reflect.Method;
@@ -863,7 +861,7 @@ public class TestUserGroupInformation {
 
     // Ensure only non-private tokens are returned
     Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
-    assertEquals(3, tokens.size());
+    assertEquals(1, tokens.size());
   }
 
   /**
@@ -930,46 +928,4 @@ public class TestUserGroupInformation {
       }
     }
   }
-
-  @Test (timeout = 30000)
-  public void testExternalTokenFiles() throws Exception {
-    StringBuilder tokenFullPathnames = new StringBuilder();
-    String tokenFilenames = "token1,token2";
-    String tokenFiles[] = tokenFilenames.split("\\s*,\\s*+");
-    final File testDir = new File("target",
-        TestUserGroupInformation.class.getName() + "-tmpDir").getAbsoluteFile();
-    String testDirPath = testDir.getAbsolutePath();
-
-    // create path for token files
-    for (String tokenFile: tokenFiles) {
-      if (tokenFullPathnames.length() > 0) {
-        tokenFullPathnames.append(",");
-      }
-      tokenFullPathnames.append(testDirPath).append("/").append(tokenFile);
-    }
-
-    // create new token and store it
-    TestTokenIdentifier tokenId = new TestTokenIdentifier();
-    Credentials cred1 = new Credentials();
-    Token<TestTokenIdentifier> token1 = new Token<TestTokenIdentifier>(
-            tokenId.getBytes(), "password".getBytes(),
-            tokenId.getKind(), new Text("token-service1"));
-    cred1.addToken(token1.getService(), token1);
-    cred1.writeTokenStorageFile(new Path(testDirPath, tokenFiles[0]), conf);
-
-    Credentials cred2 = new Credentials();
-    Token<TestTokenIdentifier> token2 = new Token<TestTokenIdentifier>(
-            tokenId.getBytes(), "password".getBytes(),
-            tokenId.getKind(), new Text("token-service2"));
-    cred2.addToken(token2.getService(), token2);
-    cred2.writeTokenStorageFile(new Path(testDirPath, tokenFiles[1]), conf);
-
-    // set property for token external token files
-    System.setProperty("hadoop.token.files", tokenFullPathnames.toString());
-    UserGroupInformation.setLoginUser(null);
-    UserGroupInformation tokenUgi = UserGroupInformation.getLoginUser();
-    Collection<Token<?>> credsugiTokens = tokenUgi.getTokens();
-    assertTrue(credsugiTokens.contains(token1));
-    assertTrue(credsugiTokens.contains(token2));
-  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index c2a7ef8..4049b80 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -231,7 +231,7 @@ public class WebHdfsFileSystem extends FileSystem
   // the first getAuthParams() for a non-token op will either get the
   // internal token from the ugi or lazy fetch one
   protected synchronized Token<?> getDelegationToken() throws IOException {
-    if (delegationToken == null) {
+    if (canRefreshDelegationToken && delegationToken == null) {
       Token<?> token = tokenSelector.selectToken(
           new Text(getCanonicalServiceName()), ugi.getTokens());
       // ugi tokens are usually indicative of a task which can't
@@ -241,13 +241,11 @@ public class WebHdfsFileSystem extends FileSystem
         LOG.debug("Using UGI token: {}", token);
         canRefreshDelegationToken = false;
       } else {
-        if (canRefreshDelegationToken) {
-          token = getDelegationToken(null);
-          if (token != null) {
-            LOG.debug("Fetched new token: {}", token);
-          } else { // security is disabled
-            canRefreshDelegationToken = false;
-          }
+        token = getDelegationToken(null);
+        if (token != null) {
+          LOG.debug("Fetched new token: {}", token);
+        } else { // security is disabled
+          canRefreshDelegationToken = false;
         }
       }
       setDelegationToken(token);
@@ -259,7 +257,6 @@ public class WebHdfsFileSystem extends FileSystem
   synchronized boolean replaceExpiredDelegationToken() throws IOException {
     boolean replaced = false;
     if (canRefreshDelegationToken) {
-      this.delegationToken = null;
       Token<?> token = getDelegationToken(null);
       LOG.debug("Replaced expired token: {}", token);
       setDelegationToken(token);
@@ -1349,7 +1346,7 @@ public class WebHdfsFileSystem extends FileSystem
     final HttpOpParam.Op op = GetOpParam.Op.LISTSTATUS;
     return new FsPathResponseRunner<FileStatus[]>(op, f) {
       @Override
-      FileStatus[] decodeResponse(Map<?, ?> json) {
+      FileStatus[] decodeResponse(Map<?,?> json) {
         final Map<?, ?> rootmap =
             (Map<?, ?>)json.get(FileStatus.class.getSimpleName() + "es");
         final List<?> array = JsonUtilClient.getList(rootmap,
@@ -1370,34 +1367,18 @@ public class WebHdfsFileSystem extends FileSystem
   }
 
   @Override
-  public synchronized Token<DelegationTokenIdentifier> getDelegationToken(
+  public Token<DelegationTokenIdentifier> getDelegationToken(
       final String renewer) throws IOException {
     final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
-    Token<DelegationTokenIdentifier> token = null;
-
-    if (delegationToken == null) {
-      token =
-          new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(
-              op, null, new RenewerParam(renewer)) {
+    Token<DelegationTokenIdentifier> token =
+        new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(
+            op, null, new RenewerParam(renewer)) {
           @Override
-          Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json)
+          Token<DelegationTokenIdentifier> decodeResponse(Map<?,?> json)
               throws IOException {
             return JsonUtilClient.toDelegationToken(json);
           }
         }.run();
-    } else {
-      token =
-          new FsPathResponseRunner<Token<DelegationTokenIdentifier>>(
-              op, null, new RenewerParam(renewer),
-              new DelegationParam(delegationToken.encodeToUrlString())) {
-          @Override
-          Token<DelegationTokenIdentifier> decodeResponse(Map<?, ?> json)
-              throws IOException {
-            return JsonUtilClient.toDelegationToken(json);
-          }
-        }.run();
-    }
-
     if (token != null) {
       token.setService(tokenServiceName);
     } else {
@@ -1425,26 +1406,13 @@ public class WebHdfsFileSystem extends FileSystem
   public synchronized long renewDelegationToken(final Token<?> token
   ) throws IOException {
     final HttpOpParam.Op op = PutOpParam.Op.RENEWDELEGATIONTOKEN;
-
-    if (delegationToken == null) {
-      return new FsPathResponseRunner<Long>(op, null,
-          new TokenArgumentParam(token.encodeToUrlString())) {
-        @Override
-        Long decodeResponse(Map<?, ?> json) throws IOException {
-          return ((Number) json.get("long")).longValue();
-        }
-      }.run();
-    } else {
-      return new FsPathResponseRunner<Long>(op, null,
-          new TokenArgumentParam(token.encodeToUrlString()),
-          new DelegationParam(delegationToken.encodeToUrlString())) {
-        @Override
-        Long decodeResponse(Map<?, ?> json) throws IOException {
-          return ((Number) json.get("long")).longValue();
-        }
-      }.run();
-    }
-
+    return new FsPathResponseRunner<Long>(op, null,
+        new TokenArgumentParam(token.encodeToUrlString())) {
+      @Override
+      Long decodeResponse(Map<?,?> json) throws IOException {
+        return ((Number) json.get("long")).longValue();
+      }
+    }.run();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
index fb129d8..5329580 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
+import org.apache.hadoop.security.UserGroupInformation;
+
 /** Represents delegation token used for authentication. */
 public class DelegationParam extends StringParam {
   /** Parameter name. */
@@ -31,8 +33,8 @@ public class DelegationParam extends StringParam {
    * @param str a string representation of the parameter value.
    */
   public DelegationParam(final String str) {
-    super(DOMAIN, str != null && !str.equals(DEFAULT)? str: null);
-
+    super(DOMAIN, UserGroupInformation.isSecurityEnabled()
+        && str != null && !str.equals(DEFAULT)? str: null);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c1a323b..f84e1e5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -55,9 +55,6 @@ Trunk (Unreleased)
     HDFS-9057. allow/disallow snapshots via webhdfs
     (Bramma Reddy Battula via vinayakumarb)
 
-    HDFS-9525. hadoop utilities need to support provided delegation tokens
-    (HeeSoo Kim via aw)
-
   IMPROVEMENTS
 
     HDFS-4665. Move TestNetworkTopologyWithNodeGroup to common.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index 5e602b5..4626507 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -895,6 +895,10 @@ public class NamenodeWebHdfsMethods {
     }
     case GETDELEGATIONTOKEN:
     {
+      if (delegation.getValue() != null) {
+        throw new IllegalArgumentException(delegation.getName()
+            + " parameter is not null.");
+      }
       final Token<? extends TokenIdentifier> token = generateDelegationToken(
           namenode, ugi, renewer.getValue());
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
index b17cb4d..5e8568c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
@@ -296,59 +296,7 @@ public class TestWebHdfsTokens {
       }
     }
   }
-
-  @Test
-  public void testReuseToken() throws Exception {
-    MiniDFSCluster cluster = null;
-
-    UserGroupInformation loginUgi = UserGroupInformation.createUserForTesting(
-        "LoginUser", new String[]{"supergroup"});
-
-    try {
-      final Configuration clusterConf = new HdfsConfiguration(conf);
-      SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
-      clusterConf.setBoolean(DFSConfigKeys
-          .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
-      UserGroupInformation.setConfiguration(clusterConf);
-      UserGroupInformation.setLoginUser(loginUgi);
-
-      cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(0).build();
-      cluster.waitActive();
-
-      /* create SIMPLE client connection */
-      SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
-      UserGroupInformation.setConfiguration(clusterConf);
-      UserGroupInformation simpleUgi = UserGroupInformation.createUserForTesting(
-          "testUser", new String[]{"supergroup"});
-      final WebHdfsFileSystem simpleFs = WebHdfsTestUtil.getWebHdfsFileSystemAs
-          (simpleUgi, clusterConf, "webhdfs");
-
-      /* create KERBEROS client connection */
-      SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
-      UserGroupInformation.setConfiguration(clusterConf);
-      UserGroupInformation krbUgi = UserGroupInformation.createUserForTesting(
-          "testUser", new String[]{"supergroup"});
-      final WebHdfsFileSystem krbFs = WebHdfsTestUtil.getWebHdfsFileSystemAs
-              (krbUgi, clusterConf, "webhdfs");
-
-      // 1. Get initial token through kerberos client connection
-      Token<DelegationTokenIdentifier> krbToken
-        = krbFs.getDelegationToken(null);
-      Assert.assertNotNull(krbToken);
-
-      // 2. Get token with previous token which gets from kerberos connection
-      //    through SIMPLE client connection.
-      simpleFs.setDelegationToken(krbToken);
-      Token<?> simpleToken =  simpleFs.getDelegationToken();
-      Assert.assertNotNull(simpleToken);
-      Assert.assertEquals(krbToken.getService(), simpleToken.getService());
-    } finally {
-      if (cluster != null) {
-        cluster.shutdown();
-      }
-    }
-  }
-
+  
   @SuppressWarnings("unchecked")
   private void validateLazyTokenFetch(final Configuration clusterConf) throws Exception{
     final String testUser = "DummyUser";
@@ -360,6 +308,16 @@ public class TestWebHdfsTokens {
         return spy((WebHdfsFileSystem) FileSystem.newInstance(uri, clusterConf));
 	  }
     });
+    // verify token ops don't get a token
+    Assert.assertNull(fs.getRenewToken());
+    Token<?> token = fs.getDelegationToken(null);
+    fs.renewDelegationToken(token);
+    fs.cancelDelegationToken(token);
+    verify(fs, never()).getDelegationToken();
+    verify(fs, never()).replaceExpiredDelegationToken();
+    verify(fs, never()).setDelegationToken(any(Token.class));
+    Assert.assertNull(fs.getRenewToken());
+    reset(fs);
 
     // verify first non-token op gets a token
     final Path p = new Path("/f");
@@ -368,8 +326,8 @@ public class TestWebHdfsTokens {
     verify(fs, never()).replaceExpiredDelegationToken();
     verify(fs, times(1)).getDelegationToken(anyString());
     verify(fs, times(1)).setDelegationToken(any(Token.class));
-    Token<?> token = fs.getRenewToken();
-    Assert.assertNotNull(token);
+    token = fs.getRenewToken();
+    Assert.assertNotNull(token);      
     Assert.assertEquals(testUser, getTokenOwner(token));
     Assert.assertEquals(fs.getTokenKind(), token.getKind());
     reset(fs);
@@ -463,7 +421,6 @@ public class TestWebHdfsTokens {
     verify(fs, times(1)).cancelDelegationToken(eq(token2));
 
     // add a token to ugi for a new fs, verify it uses that token
-    fs.setDelegationToken(null);
     token = fs.getDelegationToken(null);
     ugi.addToken(token);
     fs = ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/576b569b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
index ba1c8a2..2913a97 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
@@ -195,33 +195,9 @@ public class TestWebHdfsUrl {
     checkQueryParams(
         new String[]{
             GetOpParam.Op.GETFILESTATUS.toQueryString(),
-            new DelegationParam(tokenString).toString()
-        },
-        fileStatusUrl);
-
-    // send user with delegationToken
-    getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN,
-        fsPath, new DelegationParam(tokenString));
-    checkQueryParams(
-        new String[]{
-            GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getShortUserName()).toString(),
-            new DelegationParam(tokenString).toString()
-        },
-        getTokenUrl);
-
-    // send user with delegationToken
-    renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
-        fsPath, new TokenArgumentParam(tokenString),
-        new DelegationParam(tokenString));
-    checkQueryParams(
-        new String[]{
-            PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getShortUserName()).toString(),
-            new TokenArgumentParam(tokenString).toString(),
-            new DelegationParam(tokenString).toString()
+            new UserParam(ugi.getShortUserName()).toString()
         },
-        renewTokenUrl);
+        fileStatusUrl);    
   }
 
   @Test(timeout=60000)
@@ -298,13 +274,14 @@ public class TestWebHdfsUrl {
             new TokenArgumentParam(tokenString).toString()
         },
         cancelTokenUrl);
-
+    
     // send real+effective
     fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
     checkQueryParams(
         new String[]{
             GetOpParam.Op.GETFILESTATUS.toQueryString(),
-            new DelegationParam(tokenString).toString()
+            new UserParam(ugi.getRealUser().getShortUserName()).toString(),
+            new DoAsParam(ugi.getShortUserName()).toString()
         },
         fileStatusUrl);    
   }