Posted to common-commits@hadoop.apache.org by gk...@apache.org on 2012/08/03 21:00:44 UTC

svn commit: r1369164 [3/4] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ dev-support/ src/ src/main/bin/ src/main/conf/ src/main/docs/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/o...

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Fri Aug  3 19:00:15 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.security;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
 
+import java.io.File;
 import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.security.AccessControlContext;
@@ -32,6 +33,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -62,6 +64,7 @@ import org.apache.hadoop.security.authen
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.Time;
 
 /**
  * User and group information for Hadoop.
@@ -453,9 +456,28 @@ public class UserGroupInformation {
       return null;
     }
   }
-  
+
+  /**
+   * Represents a javax.security configuration that is created at runtime.
+   */
+  private static class DynamicConfiguration
+      extends javax.security.auth.login.Configuration {
+    private AppConfigurationEntry[] ace;
+    
+    DynamicConfiguration(AppConfigurationEntry[] ace) {
+      this.ace = ace;
+    }
+    
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
+      return ace;
+    }
+  }
+
   private static LoginContext
-  newLoginContext(String appName, Subject subject) throws LoginException {
+  newLoginContext(String appName, Subject subject,
+    javax.security.auth.login.Configuration loginConf)
+      throws LoginException {
     // Temporarily switch the thread's ContextClassLoader to match this
     // class's classloader, so that we can properly load HadoopLoginModule
     // from the JAAS libraries.
@@ -463,7 +485,7 @@ public class UserGroupInformation {
     ClassLoader oldCCL = t.getContextClassLoader();
     t.setContextClassLoader(HadoopLoginModule.class.getClassLoader());
     try {
-      return new LoginContext(appName, subject, null, new HadoopConfiguration());
+      return new LoginContext(appName, subject, null, loginConf);
     } finally {
       t.setContextClassLoader(oldCCL);
     }
@@ -516,6 +538,82 @@ public class UserGroupInformation {
   }
 
   /**
+   * Find the most appropriate UserGroupInformation to use.
+   *
+   * @param ticketCachePath    The Kerberos ticket cache path, or NULL
+   *                           if none is specified
+   * @param user               The user name, or NULL if none is specified.
+   *
+   * @return                   The most appropriate UserGroupInformation
+   * @throws IOException       if logging in via the ticket cache fails
+   */
+  public static UserGroupInformation getBestUGI(
+      String ticketCachePath, String user) throws IOException {
+    if (ticketCachePath != null) {
+      return getUGIFromTicketCache(ticketCachePath, user);
+    } else if (user == null) {
+      return getCurrentUser();
+    } else {
+      return createRemoteUser(user);
+    }    
+  }
+
+  /**
+   * Create a UserGroupInformation from a Kerberos ticket cache.
+   * 
+   * @param ticketCache         the path to the ticket cache file
+   * @param user                the principal name to load from the ticket
+   *                            cache
+   *
+   * @throws IOException        if the Kerberos login fails
+   */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
+  public static UserGroupInformation getUGIFromTicketCache(
+            String ticketCache, String user) throws IOException {
+    if (!isSecurityEnabled()) {
+      return getBestUGI(null, user);
+    }
+    try {
+      Map<String,String> krbOptions = new HashMap<String,String>();
+      krbOptions.put("doNotPrompt", "true");
+      krbOptions.put("useTicketCache", "true");
+      krbOptions.put("useKeyTab", "false");
+      krbOptions.put("renewTGT", "false");
+      krbOptions.put("ticketCache", ticketCache);
+      krbOptions.putAll(HadoopConfiguration.BASIC_JAAS_OPTIONS);
+      AppConfigurationEntry ace = new AppConfigurationEntry(
+          KerberosUtil.getKrb5LoginModuleName(),
+          LoginModuleControlFlag.REQUIRED,
+          krbOptions);
+      DynamicConfiguration dynConf =
+          new DynamicConfiguration(new AppConfigurationEntry[]{ ace });
+      LoginContext login = newLoginContext(
+          HadoopConfiguration.USER_KERBEROS_CONFIG_NAME, null, dynConf);
+      login.login();
+
+      Subject loginSubject = login.getSubject();
+      Set<Principal> loginPrincipals = loginSubject.getPrincipals();
+      if (loginPrincipals.isEmpty()) {
+        throw new RuntimeException("No login principals found!");
+      }
+      if (loginPrincipals.size() != 1) {
+        LOG.warn("found more than one principal in the ticket cache file " +
+          ticketCache);
+      }
+      User ugiUser = new User(loginPrincipals.iterator().next().getName(),
+          AuthenticationMethod.KERBEROS, login);
+      loginSubject.getPrincipals().add(ugiUser);
+      UserGroupInformation ugi = new UserGroupInformation(loginSubject);
+      ugi.setLogin(login);
+      ugi.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
+      return ugi;
+    } catch (LoginException le) {
+      throw new IOException("failure to login using ticket cache file " +
+          ticketCache, le);
+    }
+  }
+
+  /**
    * Get the currently logged in user.
    * @return the logged in user
    * @throws IOException if login fails
@@ -530,10 +628,10 @@ public class UserGroupInformation {
         LoginContext login;
         if (isSecurityEnabled()) {
           login = newLoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME,
-              subject);
+              subject, new HadoopConfiguration());
         } else {
           login = newLoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME, 
-              subject);
+              subject, new HadoopConfiguration());
         }
         login.login();
         loginUser = new UserGroupInformation(subject);
@@ -613,7 +711,7 @@ public class UserGroupInformation {
             long nextRefresh = getRefreshTime(tgt);
             while (true) {
               try {
-                long now = System.currentTimeMillis();
+                long now = Time.now();
                 if(LOG.isDebugEnabled()) {
                   LOG.debug("Current time is " + now);
                   LOG.debug("Next refresh is " + nextRefresh);
@@ -673,17 +771,17 @@ public class UserGroupInformation {
     LoginContext login; 
     long start = 0;
     try {
-      login = 
-        newLoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject);
-      start = System.currentTimeMillis();
+      login = newLoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME,
+            subject, new HadoopConfiguration());
+      start = Time.now();
       login.login();
-      metrics.loginSuccess.add(System.currentTimeMillis() - start);
+      metrics.loginSuccess.add(Time.now() - start);
       loginUser = new UserGroupInformation(subject);
       loginUser.setLogin(login);
       loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.add(System.currentTimeMillis() - start);
+        metrics.loginFailure.add(Time.now() - start);
       }
       throw new IOException("Login failure for " + user + " from keytab " + 
                             path, le);
@@ -703,7 +801,7 @@ public class UserGroupInformation {
         || !isKeytab)
       return;
     KerberosTicket tgt = getTGT();
-    if (tgt != null && System.currentTimeMillis() < getRefreshTime(tgt)) {
+    if (tgt != null && Time.now() < getRefreshTime(tgt)) {
       return;
     }
     reloginFromKeytab();
@@ -727,7 +825,7 @@ public class UserGroupInformation {
          !isKeytab)
       return;
     
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     if (!hasSufficientTimeElapsed(now)) {
       return;
     }
@@ -756,16 +854,17 @@ public class UserGroupInformation {
         // login and also update the subject field of this instance to
         // have the new credentials (pass it to the LoginContext constructor)
         login = newLoginContext(
-            HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, getSubject());
+            HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, getSubject(),
+            new HadoopConfiguration());
         LOG.info("Initiating re-login for " + keytabPrincipal);
-        start = System.currentTimeMillis();
+        start = Time.now();
         login.login();
-        metrics.loginSuccess.add(System.currentTimeMillis() - start);
+        metrics.loginSuccess.add(Time.now() - start);
         setLogin(login);
       }
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.add(System.currentTimeMillis() - start);
+        metrics.loginFailure.add(Time.now() - start);
       }
       throw new IOException("Login failure for " + keytabPrincipal + 
           " from keytab " + keytabFile, le);
@@ -791,7 +890,7 @@ public class UserGroupInformation {
     if (login == null) {
       throw new IOException("login must be done first");
     }
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     if (!hasSufficientTimeElapsed(now)) {
       return;
     }
@@ -807,7 +906,7 @@ public class UserGroupInformation {
       //have the new credentials (pass it to the LoginContext constructor)
       login = 
         newLoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME, 
-            getSubject());
+            getSubject(), new HadoopConfiguration());
       LOG.info("Initiating re-login for " + getUserName());
       login.login();
       setLogin(login);
@@ -842,12 +941,13 @@ public class UserGroupInformation {
       keytabPrincipal = user;
       Subject subject = new Subject();
       
-      LoginContext login = 
-        newLoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject); 
+      LoginContext login = newLoginContext(
+          HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject,
+          new HadoopConfiguration());
        
-      start = System.currentTimeMillis();
+      start = Time.now();
       login.login();
-      metrics.loginSuccess.add(System.currentTimeMillis() - start);
+      metrics.loginSuccess.add(Time.now() - start);
       UserGroupInformation newLoginUser = new UserGroupInformation(subject);
       newLoginUser.setLogin(login);
       newLoginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
@@ -855,7 +955,7 @@ public class UserGroupInformation {
       return newLoginUser;
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.add(System.currentTimeMillis() - start);
+        metrics.loginFailure.add(Time.now() - start);
       }
       throw new IOException("Login failure for " + user + " from keytab " + 
                             path, le);
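
For reference, a minimal sketch of how a client might use the new
getBestUGI()/getUGIFromTicketCache() entry points (the ticket cache path and
principal below are hypothetical, and error handling is elided):

  import java.io.IOException;
  import java.security.PrivilegedExceptionAction;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.security.UserGroupInformation;

  public class UgiExample {
    public static void main(String[] args) throws Exception {
      final Configuration conf = new Configuration();
      // An explicit ticket cache wins; with a null cache path, getBestUGI
      // falls back to the current user (null user) or a simple remote user.
      UserGroupInformation ugi = UserGroupInformation.getBestUGI(
          "/tmp/krb5cc_1000", "alice@EXAMPLE.COM");  // hypothetical values
      FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        @Override
        public FileSystem run() throws IOException {
          return FileSystem.get(conf);
        }
      });
      System.out.println("filesystem for " + ugi.getUserName()
          + ": " + fs.getUri());
    }
  }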

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java Fri Aug  3 19:00:15 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.HadoopKerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -88,14 +89,17 @@ extends TokenIdentifier {
     if ( (owner == null) || ("".equals(owner.toString()))) {
       return null;
     }
+    final UserGroupInformation realUgi;
+    final UserGroupInformation ugi;
     if ((realUser == null) || ("".equals(realUser.toString()))
         || realUser.equals(owner)) {
-      return UserGroupInformation.createRemoteUser(owner.toString());
+      ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
     } else {
-      UserGroupInformation realUgi = UserGroupInformation
-          .createRemoteUser(realUser.toString());
-      return UserGroupInformation.createProxyUser(owner.toString(), realUgi);
+      realUgi = UserGroupInformation.createRemoteUser(realUser.toString());
+      ugi = UserGroupInformation.createProxyUser(owner.toString(), realUgi);
     }
+    realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
+    return ugi;
   }
 
   public Text getOwner() {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Fri Aug  3 19:00:15 2012
@@ -39,6 +39,7 @@ import org.apache.hadoop.security.Hadoop
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.util.Daemon;
+import org.apache.hadoop.util.Time;
 
 import com.google.common.base.Preconditions;
 
@@ -165,7 +166,7 @@ extends AbstractDelegationTokenIdentifie
     synchronized (this) {
       removeExpiredKeys();
       /* set final expiry date for retiring currentKey */
-      currentKey.setExpiryDate(System.currentTimeMillis() + tokenMaxLifetime);
+      currentKey.setExpiryDate(Time.now() + tokenMaxLifetime);
       /*
        * currentKey might have been removed by removeExpiredKeys(), if
        * updateMasterKey() isn't called at expected interval. Add it back to
@@ -177,7 +178,7 @@ extends AbstractDelegationTokenIdentifie
   }
 
   private synchronized void removeExpiredKeys() {
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     for (Iterator<Map.Entry<Integer, DelegationKey>> it = allKeys.entrySet()
         .iterator(); it.hasNext();) {
       Map.Entry<Integer, DelegationKey> e = it.next();
@@ -191,7 +192,7 @@ extends AbstractDelegationTokenIdentifie
   protected synchronized byte[] createPassword(TokenIdent identifier) {
     LOG.info("Creating password for identifier: "+identifier);
     int sequenceNum;
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     sequenceNum = ++delegationTokenSequenceNumber;
     identifier.setIssueDate(now);
     identifier.setMaxDate(now + tokenMaxLifetime);
@@ -211,7 +212,7 @@ extends AbstractDelegationTokenIdentifie
       throw new InvalidToken("token (" + identifier.toString()
           + ") can't be found in cache");
     }
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     if (info.getRenewDate() < now) {
       throw new InvalidToken("token (" + identifier.toString() + ") is expired");
     }
@@ -243,7 +244,7 @@ extends AbstractDelegationTokenIdentifie
    */
   public synchronized long renewToken(Token<TokenIdent> token,
                          String renewer) throws InvalidToken, IOException {
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
     DataInputStream in = new DataInputStream(buf);
     TokenIdent id = createIdentifier();
@@ -353,7 +354,7 @@ extends AbstractDelegationTokenIdentifie
   
   /** Remove expired delegation tokens from cache */
   private synchronized void removeExpiredToken() {
-    long now = System.currentTimeMillis();
+    long now = Time.now();
     Iterator<DelegationTokenInformation> i = currentTokens.values().iterator();
     while (i.hasNext()) {
       long renewDate = i.next().getRenewDate();
@@ -399,7 +400,7 @@ extends AbstractDelegationTokenIdentifie
           / (60 * 1000) + " min(s)");
       try {
         while (running) {
-          long now = System.currentTimeMillis();
+          long now = Time.now();
           if (lastMasterKeyUpdate + keyUpdateInterval < now) {
             try {
               rollMasterKey();
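
Throughout this patch, direct System.currentTimeMillis() reads are funneled
through org.apache.hadoop.util.Time. A minimal sketch of the pattern, on the
assumption that Time.now() simply delegates to the system wall clock:

  import org.apache.hadoop.util.Time;

  public class TimeExample {
    public static void main(String[] args) throws InterruptedException {
      // One utility for wall-clock reads gives every call site a single
      // seam for tests and for future clock-source changes.
      long start = Time.now();
      Thread.sleep(100);
      System.out.println("elapsed ~" + (Time.now() - start) + " ms");
    }
  }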

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java Fri Aug  3 19:00:15 2012
@@ -126,12 +126,12 @@ public class AsyncDiskService {
   public synchronized boolean awaitTermination(long milliseconds) 
       throws InterruptedException {
 
-    long end = System.currentTimeMillis() + milliseconds;
+    long end = Time.now() + milliseconds;
     for (Map.Entry<String, ThreadPoolExecutor> e:
         executors.entrySet()) {
       ThreadPoolExecutor executor = e.getValue();
       if (!executor.awaitTermination(
-          Math.max(end - System.currentTimeMillis(), 0),
+          Math.max(end - Time.now(), 0),
           TimeUnit.MILLISECONDS)) {
         LOG.warn("AsyncDiskService awaitTermination timeout.");
         return false;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java Fri Aug  3 19:00:15 2012
@@ -78,26 +78,32 @@ public class DiskChecker {
   }
   
   /**
-   * Create the directory if it doesn't exist and
+   * Create the directory if it doesn't exist and check that dir is readable,
+   * writable and executable.
+   *
    * @param dir
    * @throws DiskErrorException
    */
   public static void checkDir(File dir) throws DiskErrorException {
     if (!mkdirsWithExistsCheck(dir))
-      throw new DiskErrorException("can not create directory: " 
+      throw new DiskErrorException("Can not create directory: "
                                    + dir.toString());
-        
+
     if (!dir.isDirectory())
-      throw new DiskErrorException("not a directory: " 
+      throw new DiskErrorException("Not a directory: "
                                    + dir.toString());
-            
+
     if (!dir.canRead())
-      throw new DiskErrorException("directory is not readable: " 
+      throw new DiskErrorException("Directory is not readable: "
                                    + dir.toString());
-            
+
     if (!dir.canWrite())
-      throw new DiskErrorException("directory is not writable: " 
+      throw new DiskErrorException("Directory is not writable: "
                                    + dir.toString());
+
+    if (!dir.canExecute())
+      throw new DiskErrorException("Directory is not executable: "
+                                    + dir.toString());
   }
 
   /**
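
A short caller-side sketch of the strengthened check (the directory path is
hypothetical):

  import java.io.File;
  import org.apache.hadoop.util.DiskChecker;
  import org.apache.hadoop.util.DiskChecker.DiskErrorException;

  public class DiskCheckExample {
    public static void main(String[] args) {
      try {
        // Creates the directory if needed, then verifies it is a readable,
        // writable and (with this change) executable directory.
        DiskChecker.checkDir(new File("/tmp/hadoop-local-dir"));
      } catch (DiskErrorException e) {
        System.err.println("bad local dir: " + e.getMessage());
      }
    }
  }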

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java Fri Aug  3 19:00:15 2012
@@ -268,7 +268,8 @@ public class GenericOptionsParser {
     }
 
     if (line.hasOption("jt")) {
-      conf.set("mapred.job.tracker", line.getOptionValue("jt"));
+      conf.set("mapred.job.tracker", line.getOptionValue("jt"), 
+          "from -jt command line option");
     }
     if (line.hasOption("conf")) {
       String[] values = line.getOptionValues("conf");
@@ -278,7 +279,8 @@ public class GenericOptionsParser {
     }
     if (line.hasOption("libjars")) {
       conf.set("tmpjars", 
-               validateFiles(line.getOptionValue("libjars"), conf));
+               validateFiles(line.getOptionValue("libjars"), conf),
+               "from -libjars command line option");
       //setting libjars in client classpath
       URL[] libjars = getLibJars(conf);
       if(libjars!=null && libjars.length>0) {
@@ -290,18 +292,20 @@ public class GenericOptionsParser {
     }
     if (line.hasOption("files")) {
       conf.set("tmpfiles", 
-               validateFiles(line.getOptionValue("files"), conf));
+               validateFiles(line.getOptionValue("files"), conf),
+               "from -files command line option");
     }
     if (line.hasOption("archives")) {
       conf.set("tmparchives", 
-                validateFiles(line.getOptionValue("archives"), conf));
+                validateFiles(line.getOptionValue("archives"), conf),
+                "from -archives command line option");
     }
     if (line.hasOption('D')) {
       String[] property = line.getOptionValues('D');
       for(String prop : property) {
         String[] keyval = prop.split("=", 2);
         if (keyval.length == 2) {
-          conf.set(keyval[0], keyval[1]);
+          conf.set(keyval[0], keyval[1], "from command line");
         }
       }
     }
@@ -320,7 +324,7 @@ public class GenericOptionsParser {
         LOG.debug("setting conf tokensFile: " + fileName);
       }
       conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
-          .toString());
+          .toString(), "from -tokenCacheFile command line option");
 
     }
   }
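
With the three-argument conf.set() used above, the origin of each value
becomes queryable. A minimal sketch (the key name is hypothetical):

  import org.apache.hadoop.conf.Configuration;

  public class ConfSourceExample {
    public static void main(String[] args) {
      Configuration conf = new Configuration(false);
      conf.set("my.example.key", "value", "from -D command line option");
      // getPropertySources() returns the attribution recorded by set();
      // the plain two-argument set() records "programatically".
      for (String source : conf.getPropertySources("my.example.key")) {
        System.out.println(source);
      }
    }
  }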

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progressable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progressable.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progressable.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progressable.java Fri Aug  3 19:00:15 2012
@@ -26,7 +26,7 @@ import org.apache.hadoop.classification.
  * 
  * <p>Clients and/or applications can use the provided <code>Progressable</code>
  * to explicitly report progress to the Hadoop framework. This is especially
- * important for operations which take an insignificant amount of time since,
+ * important for operations which take a significant amount of time since,
 * in lieu of the reported progress, the framework has to assume that an error
 * has occurred and time-out the operation.</p>
  */

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java Fri Aug  3 19:00:15 2012
@@ -205,7 +205,7 @@ public class ReflectionUtils {
     boolean dumpStack = false;
     if (log.isInfoEnabled()) {
       synchronized (ReflectionUtils.class) {
-        long now = System.currentTimeMillis();
+        long now = Time.now();
         if (now - previousLogTime >= minInterval * 1000) {
           previousLogTime = now;
           dumpStack = true;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java Fri Aug  3 19:00:15 2012
@@ -124,7 +124,7 @@ abstract public class Shell {
 
   /** check to see if a command needs to be executed and execute if needed */
   protected void run() throws IOException {
-    if (lastTime + interval > System.currentTimeMillis())
+    if (lastTime + interval > Time.now())
       return;
     exitCode = 0; // reset for next run
     runCommand();
@@ -223,7 +223,7 @@ abstract public class Shell {
         LOG.warn("Error while closing the error stream", ioe);
       }
       process.destroy();
-      lastTime = System.currentTimeMillis();
+      lastTime = Time.now();
     }
   }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Fri Aug  3 19:00:15 2012
@@ -202,8 +202,12 @@ public class StringUtils {
   }
   
   /**
-   * 
    * @param str
+   *          The string array to be parsed into a URI array.
+   * @return <tt>null</tt> if str is <tt>null</tt>, else the URI array
+   *         equivalent to str.
+   * @throws IllegalArgumentException
+   *           If any string in str violates RFC&nbsp;2396.
    */
   public static URI[] stringToURI(String[] str){
     if (str == null) 
@@ -213,9 +217,8 @@ public class StringUtils {
       try{
         uris[i] = new URI(str[i]);
       }catch(URISyntaxException ur){
-        System.out.println("Exception in specified URI's " + StringUtils.stringifyException(ur));
-        //making sure its asssigned to null in case of an error
-        uris[i] = null;
+        throw new IllegalArgumentException(
+            "Failed to create uri for " + str[i], ur);
       }
     }
     return uris;
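
The caller-visible change above is that a malformed entry now fails fast
instead of leaving a null slot in the result. A small sketch:

  import java.net.URI;
  import org.apache.hadoop.util.StringUtils;

  public class UriExample {
    public static void main(String[] args) {
      URI[] ok = StringUtils.stringToURI(new String[] {"hdfs://nn:8020/a"});
      System.out.println(ok[0]);
      try {
        // Previously this printed the error and left uris[i] == null;
        // it now throws IllegalArgumentException.
        StringUtils.stringToURI(new String[] {"not a uri"});
      } catch (IllegalArgumentException e) {
        System.out.println("rejected: " + e.getMessage());
      }
    }
  }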

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java Fri Aug  3 19:00:15 2012
@@ -35,10 +35,10 @@ public class ThreadUtil {
    * @param millis the number of milliseconds for the current thread to sleep
    */
   public static void sleepAtLeastIgnoreInterrupts(long millis) {
-    long start = System.currentTimeMillis();
-    while (System.currentTimeMillis() - start < millis) {
+    long start = Time.now();
+    while (Time.now() - start < millis) {
       long timeToSleep = millis -
-          (System.currentTimeMillis() - start);
+          (Time.now() - start);
       try {
         Thread.sleep(timeToSleep);
       } catch (InterruptedException ie) {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml Fri Aug  3 19:00:15 2012
@@ -144,7 +144,7 @@
       The HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
 
       The HTTP Kerberos principal MUST start with 'HTTP/' per Kerberos
-      HTTP SPENGO specification.
+      HTTP SPNEGO specification.
     </description>
   </property>
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties Fri Aug  3 19:00:15 2012
@@ -106,7 +106,7 @@ hadoop.security.logger=INFO,NullAppender
 hadoop.security.log.maxfilesize=256MB
 hadoop.security.log.maxbackupindex=20
 log4j.category.SecurityLogger=${hadoop.security.logger}
-hadoop.security.log.file=SecurityAuth.audit
+hadoop.security.log.file=SecurityAuth-${user.name}.audit
 log4j.appender.RFAS=org.apache.log4j.RollingFileAppender 
 log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
 log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri Aug  3 19:00:15 2012
@@ -165,7 +165,7 @@
 
 <property>
   <name>hadoop.security.group.mapping.ldap.search.filter.user</name>
-  <value>(&amp;(objectClass=user)(sAMAccountName={0})</value>
+  <value>(&amp;(objectClass=user)(sAMAccountName={0}))</value>
   <description>
     An additional filter to use when searching for LDAP users. The default will
     usually be appropriate for Active Directory installations. If connecting to
@@ -1026,4 +1026,51 @@
   <name>hadoop.http.staticuser.user</name>
   <value>dr.who</value>
 </property>
+
+<!-- SSLFactory configuration -->
+
+<property>
+  <name>hadoop.ssl.keystores.factory.class</name>
+  <value>org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory</value>
+  <description>
+    The keystores factory to use for retrieving certificates.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.ssl.require.client.cert</name>
+  <value>false</value>
+  <description>Whether client certificates are required</description>
+</property>
+
+<property>
+  <name>hadoop.ssl.hostname.verifier</name>
+  <value>DEFAULT</value>
+  <description>
+    The hostname verifier to provide for HttpsURLConnections.
+    Valid values are: DEFAULT, STRICT, STRICT_IE6, DEFAULT_AND_LOCALHOST and
+    ALLOW_ALL
+  </description>
+</property>
+
+<property>
+  <name>hadoop.ssl.server.conf</name>
+  <value>ssl-server.xml</value>
+  <description>
+    Resource file from which ssl server keystore information will be extracted.
+    This file is looked up in the classpath, typically it should be in Hadoop
+    conf/ directory.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.ssl.client.conf</name>
+  <value>ssl-client.xml</value>
+  <description>
+    Resource file from which ssl client keystore information will be extracted.
+    This file is looked up in the classpath, typically it should be in Hadoop
+    conf/ directory.
+  </description>
+</property>
+
 </configuration>
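
These keys are consumed by org.apache.hadoop.security.ssl.SSLFactory. A
minimal client-side sketch, assuming the SSLFactory API on this branch
(init()/createSSLSocketFactory()/getHostnameVerifier()/destroy()) and a
hypothetical endpoint:

  import java.net.URL;
  import javax.net.ssl.HttpsURLConnection;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.security.ssl.SSLFactory;

  public class SslClientExample {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // CLIENT mode reads hadoop.ssl.client.conf (ssl-client.xml) from the
      // classpath for keystore/truststore information.
      SSLFactory factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
      factory.init();
      try {
        HttpsURLConnection conn = (HttpsURLConnection)
            new URL("https://example.com:50470/").openConnection();
        conn.setSSLSocketFactory(factory.createSSLSocketFactory());
        conn.setHostnameVerifier(factory.getHostnameVerifier());
        System.out.println("HTTP " + conn.getResponseCode());
      } finally {
        factory.destroy();
      }
    }
  }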

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm Fri Aug  3 19:00:15 2012
@@ -196,7 +196,9 @@ Deprecated Properties
 *---+---+
 |mapred.compress.map.output | mapreduce.map.output.compress
 *---+---+
-|mapred.create.symlink | mapreduce.job.cache.symlink.create
+|mapred.create.symlink | NONE - symlinking is always on
+*---+---+
+|mapreduce.job.cache.symlink.create | NONE - symlinking is always on
 *---+---+
 |mapred.data.field.separator | mapreduce.fieldsel.data.field.separator
 *---+---+

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1358480-1369130

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java Fri Aug  3 19:00:15 2012
@@ -64,7 +64,7 @@ public class TestConfServlet extends Tes
       String resource = (String)propertyInfo.get("resource");
       System.err.println("k: " + key + " v: " + val + " r: " + resource);
       if (TEST_KEY.equals(key) && TEST_VAL.equals(val)
-          && Configuration.UNKNOWN_RESOURCE.equals(resource)) {
+          && "programatically".equals(resource)) {
         foundSetting = true;
       }
     }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Fri Aug  3 19:00:15 2012
@@ -18,10 +18,12 @@
 package org.apache.hadoop.conf;
 
 import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.StringWriter;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
@@ -77,6 +79,22 @@ public class TestConfiguration extends T
   private void addInclude(String filename) throws IOException{
     out.write("<xi:include href=\"" + filename + "\" xmlns:xi=\"http://www.w3.org/2001/XInclude\"  />\n ");
   }
+  
+  public void testInputStreamResource() throws Exception {
+    StringWriter writer = new StringWriter();
+    out = new BufferedWriter(writer);
+    startConfig();
+    declareProperty("prop", "A", "A");
+    endConfig();
+    
+    InputStream in1 = new ByteArrayInputStream(writer.toString().getBytes());
+    Configuration conf = new Configuration(false);
+    conf.addResource(in1);
+    assertEquals("A", conf.get("prop"));
+    InputStream in2 = new ByteArrayInputStream(writer.toString().getBytes());
+    conf.addResource(in2);
+    assertEquals("A", conf.get("prop"));
+  }
 
   public void testVariableSubstitution() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
@@ -168,7 +186,8 @@ public class TestConfiguration extends T
     appendProperty(name, val, false);
   }
  
-  void appendProperty(String name, String val, boolean isFinal)
+  void appendProperty(String name, String val, boolean isFinal, 
+      String ... sources)
     throws IOException {
     out.write("<property>");
     out.write("<name>");
@@ -180,6 +199,11 @@ public class TestConfiguration extends T
     if (isFinal) {
       out.write("<final>true</final>");
     }
+    for(String s : sources) {
+      out.write("<source>");
+      out.write(s);
+      out.write("</source>");
+    }
     out.write("</property>\n");
   }
   
@@ -671,16 +695,38 @@ public class TestConfiguration extends T
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
     conf.set("fs.defaultFS", "value");
+    String [] sources = conf.getPropertySources("test.foo");
+    assertEquals(1, sources.length);
     assertEquals(
         "Resource string returned for a file-loaded property" +
         " must be a proper absolute path",
         fileResource,
-        new Path(conf.getPropertySource("test.foo")));
-    assertEquals("Resource string returned for a set() property must be null",
-        null,
-        conf.getPropertySource("fs.defaultFS"));
+        new Path(sources[0]));
+    assertArrayEquals("Resource string returned for a set() property must be " +
+        "\"programatically\"",
+        new String[]{"programatically"},
+        conf.getPropertySources("fs.defaultFS"));
     assertEquals("Resource string returned for an unset property must be null",
-        null, conf.getPropertySource("fs.defaultFoo"));
+        null, conf.getPropertySources("fs.defaultFoo"));
+  }
+  
+  public void testMultiplePropertySource() throws IOException {
+    out = new BufferedWriter(new FileWriter(CONFIG));
+    startConfig();
+    appendProperty("test.foo", "bar", false, "a", "b", "c");
+    endConfig();
+    Path fileResource = new Path(CONFIG);
+    conf.addResource(fileResource);
+    String [] sources = conf.getPropertySources("test.foo");
+    assertEquals(4, sources.length);
+    assertEquals("a", sources[0]);
+    assertEquals("b", sources[1]);
+    assertEquals("c", sources[2]);
+    assertEquals(
+        "Resource string returned for a file-loaded property" +
+        " must be a proper absolute path",
+        fileResource,
+        new Path(sources[3]));
   }
 
   public void testSocketAddress() {
@@ -929,7 +975,7 @@ public class TestConfiguration extends T
       confDump.put(prop.getKey(), prop);
     }
     assertEquals("value5",confDump.get("test.key6").getValue());
-    assertEquals("Unknown", confDump.get("test.key4").getResource());
+    assertEquals("programatically", confDump.get("test.key4").getResource());
     outWriter.close();
   }
   
@@ -1019,6 +1065,26 @@ public class TestConfiguration extends T
         "Not returning expected number of classes. Number of returned classes ="
             + classes.length, 0, classes.length);
   }
+  
+  public void testSettingValueNull() throws Exception {
+    Configuration config = new Configuration();
+    try {
+      config.set("testClassName", null);
+      fail("Should throw an IllegalArgumentException");
+    } catch (Exception e) {
+      assertTrue(e instanceof IllegalArgumentException);
+    }
+  }
+
+  public void testSettingKeyNull() throws Exception {
+    Configuration config = new Configuration();
+    try {
+      config.set(null, "test");
+      fail("Should throw an IllegalArgumentException");
+    } catch (Exception e) {
+      assertTrue(e instanceof IllegalArgumentException);
+    }
+  }
 
   public void testInvalidSubstitutation() {
     String key = "test.random.key";

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java Fri Aug  3 19:00:15 2012
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.conf;
 
+import org.apache.hadoop.util.Time;
 import org.junit.Test;
 import org.junit.Before;
 import static org.junit.Assert.*;
@@ -295,8 +296,8 @@ public class TestReconfiguration {
     }
     dummy.reconfigureProperty(PROP1, VAL2);
 
-    long endWait = System.currentTimeMillis() + 2000;
-    while (dummyThread.isAlive() && System.currentTimeMillis() < endWait) {
+    long endWait = Time.now() + 2000;
+    while (dummyThread.isAlive() && Time.now() < endWait) {
       try {
         Thread.sleep(50);
       } catch (InterruptedException ignore) {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java Fri Aug  3 19:00:15 2012
@@ -34,8 +34,8 @@ import org.junit.Test;
 import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.Semaphore;
 
-import static org.mockito.Mockito.mock;
-import static junit.framework.Assert.assertTrue;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 
 public class TestFileSystemCaching {
@@ -49,6 +49,65 @@ public class TestFileSystemCaching {
     assertSame(fs1, fs2);
   }
 
+  static class DefaultFs extends LocalFileSystem {
+    URI uri;
+    @Override
+    public void initialize(URI uri, Configuration conf) {
+      this.uri = uri;
+    }
+    @Override
+    public URI getUri() {
+      return uri;
+    }
+  }
+  
+  @Test
+  public void testDefaultFsUris() throws Exception {
+    final Configuration conf = new Configuration();
+    conf.set("fs.defaultfs.impl", DefaultFs.class.getName());
+    final URI defaultUri = URI.create("defaultfs://host");
+    FileSystem.setDefaultUri(conf, defaultUri);
+    FileSystem fs = null;
+    
+    // sanity check default fs
+    final FileSystem defaultFs = FileSystem.get(conf);
+    assertEquals(defaultUri, defaultFs.getUri());
+    
+    // has scheme, no auth
+    fs = FileSystem.get(URI.create("defaultfs:/"), conf);
+    assertSame(defaultFs, fs);
+    fs = FileSystem.get(URI.create("defaultfs:///"), conf);
+    assertSame(defaultFs, fs);
+    
+    // has scheme, same auth
+    fs = FileSystem.get(URI.create("defaultfs://host"), conf);
+    assertSame(defaultFs, fs);
+    
+    // has scheme, different auth
+    fs = FileSystem.get(URI.create("defaultfs://host2"), conf);
+    assertNotSame(defaultFs, fs);
+    
+    // no scheme, no auth
+    fs = FileSystem.get(URI.create("/"), conf);
+    assertSame(defaultFs, fs);
+    
+    // no scheme, same auth
+    try {
+      fs = FileSystem.get(URI.create("//host"), conf);
+      fail("got fs with auth but no scheme");
+    } catch (Exception e) {
+      assertEquals("No FileSystem for scheme: null", e.getMessage());
+    }
+    
+    // no scheme, different auth
+    try {
+      fs = FileSystem.get(URI.create("//host2"), conf);
+      fail("got fs with auth but no scheme");
+    } catch (Exception e) {
+      assertEquals("No FileSystem for scheme: null", e.getMessage());
+    }
+  }
+  
   public static class InitializeForeverFileSystem extends LocalFileSystem {
     final static Semaphore sem = new Semaphore(0);
     public void initialize(URI uri, Configuration conf) throws IOException {
@@ -208,4 +267,84 @@ public class TestFileSystemCaching {
     });
     assertNotSame(fsA, fsA1);
   }
+  
+  @Test
+  public void testDelete() throws IOException {
+    FileSystem mockFs = mock(FileSystem.class);
+    FileSystem fs = new FilterFileSystem(mockFs);    
+    Path path = new Path("/a");
+
+    fs.delete(path, false);
+    verify(mockFs).delete(eq(path), eq(false));
+    reset(mockFs);
+    fs.delete(path, true);
+    verify(mockFs).delete(eq(path), eq(true));
+  }
+
+  @Test
+  public void testDeleteOnExit() throws IOException {
+    FileSystem mockFs = mock(FileSystem.class);
+    FileSystem fs = new FilterFileSystem(mockFs);
+    Path path = new Path("/a");
+
+    // delete on close if path does exist
+    when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
+    assertTrue(fs.deleteOnExit(path));
+    verify(mockFs).getFileStatus(eq(path));
+    reset(mockFs);
+    when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
+    fs.close();
+    verify(mockFs).getFileStatus(eq(path));
+    verify(mockFs).delete(eq(path), eq(true));
+  }
+
+  @Test
+  public void testDeleteOnExitFNF() throws IOException {
+    FileSystem mockFs = mock(FileSystem.class);
+    FileSystem fs = new FilterFileSystem(mockFs);
+    Path path = new Path("/a");
+
+    // don't delete on close if path doesn't exist
+    assertFalse(fs.deleteOnExit(path));
+    verify(mockFs).getFileStatus(eq(path));
+    reset(mockFs);
+    fs.close();
+    verify(mockFs, never()).getFileStatus(eq(path));
+    verify(mockFs, never()).delete(any(Path.class), anyBoolean());
+  }
+
+
+  @Test
+  public void testDeleteOnExitRemoved() throws IOException {
+    FileSystem mockFs = mock(FileSystem.class);
+    FileSystem fs = new FilterFileSystem(mockFs);
+    Path path = new Path("/a");
+
+    // don't delete on close if path existed, but later removed
+    when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
+    assertTrue(fs.deleteOnExit(path));
+    verify(mockFs).getFileStatus(eq(path));
+    reset(mockFs);
+    fs.close();
+    verify(mockFs).getFileStatus(eq(path));
+    verify(mockFs, never()).delete(any(Path.class), anyBoolean());
+  }
+
+  @Test
+  public void testCancelDeleteOnExit() throws IOException {
+    FileSystem mockFs = mock(FileSystem.class);
+    FileSystem fs = new FilterFileSystem(mockFs);
+    Path path = new Path("/a");
+
+    // don't delete on close if path existed, but later cancelled
+    when(mockFs.getFileStatus(eq(path))).thenReturn(new FileStatus());
+    assertTrue(fs.deleteOnExit(path));
+    verify(mockFs).getFileStatus(eq(path));
+    assertTrue(fs.cancelDeleteOnExit(path));
+    assertFalse(fs.cancelDeleteOnExit(path)); // false because not registered
+    reset(mockFs);
+    fs.close();
+    verify(mockFs, never()).getFileStatus(any(Path.class));
+    verify(mockFs, never()).delete(any(Path.class), anyBoolean());
+  }
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java Fri Aug  3 19:00:15 2012
@@ -179,7 +179,12 @@ public class TestFilterFileSystem {
     public Token<?> getDelegationToken(String renewer) throws IOException {
       return null;
     }
-
+    public boolean deleteOnExit(Path f) throws IOException {
+      return false;
+    }
+    public boolean cancelDeleteOnExit(Path f) throws IOException {
+      return false;
+    }
     public String getScheme() {
       return "dontcheck";
     }
@@ -281,6 +286,30 @@ public class TestFilterFileSystem {
     checkFsConf(flfs, conf, 3);
   }
 
+  @Test
+  public void testVerifyChecksumPassthru() {
+    FileSystem mockFs = mock(FileSystem.class);
+    FileSystem fs = new FilterFileSystem(mockFs);
+
+    fs.setVerifyChecksum(false);
+    verify(mockFs).setVerifyChecksum(eq(false));
+    reset(mockFs);
+    fs.setVerifyChecksum(true);
+    verify(mockFs).setVerifyChecksum(eq(true));
+  }
+
+  @Test
+  public void testWriteChecksumPassthru() {
+    FileSystem mockFs = mock(FileSystem.class);
+    FileSystem fs = new FilterFileSystem(mockFs);
+
+    fs.setWriteChecksum(false);
+    verify(mockFs).setWriteChecksum(eq(false));
+    reset(mockFs);
+    fs.setWriteChecksum(true);
+    verify(mockFs).setWriteChecksum(eq(true));
+  }
+
   private void checkInit(FilterFileSystem fs, boolean expectInit)
       throws Exception {
     URI uri = URI.create("filter:/");

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java Fri Aug  3 19:00:15 2012
@@ -122,6 +122,7 @@ public class TestGetFileBlockLocations e
     oneTest(0, (int) status.getLen() * 2, status);
     oneTest((int) status.getLen() * 2, (int) status.getLen() * 4, status);
     oneTest((int) status.getLen() / 2, (int) status.getLen() * 3, status);
+    oneTest((int) status.getLen(), (int) status.getLen() * 2, status);
     for (int i = 0; i < 10; ++i) {
       oneTest((int) status.getLen() * i / 10, (int) status.getLen() * (i + 1)
           / 10, status);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Fri Aug  3 19:00:15 2012
@@ -61,7 +61,7 @@ public class TestPath extends TestCase {
     assertEquals(pathString, new Path(pathString).toString());
   }
 
-  public void testNormalize() {
+  public void testNormalize() throws URISyntaxException {
     assertEquals("", new Path(".").toString());
     assertEquals("..", new Path("..").toString());
     assertEquals("/", new Path("/").toString());
@@ -75,6 +75,8 @@ public class TestPath extends TestCase {
     assertEquals("foo", new Path("foo/").toString());
     assertEquals("foo", new Path("foo//").toString());
     assertEquals("foo/bar", new Path("foo//bar").toString());
+    assertEquals("hdfs://foo/foo2/bar/baz/",
+        new Path(new URI("hdfs://foo//foo2///bar/baz///")).toString());
     if (Path.WINDOWS) {
       assertEquals("c:/a/b", new Path("c:\\a\\b").toString());
     }

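The added assertion documents how Path normalizes a URI: runs of
slashes in the path component of hdfs://foo//foo2///bar/baz/// are
collapsed to single separators while the scheme and authority are kept
intact. A small standalone demonstration (assumes hadoop-common on the
classpath):

    import java.net.URI;
    import org.apache.hadoop.fs.Path;

    public class PathNormalizeDemo {
      public static void main(String[] args) throws Exception {
        Path p = new Path(new URI("hdfs://foo//foo2///bar/baz///"));
        // Prints hdfs://foo/foo2/bar/baz/ per the assertion above.
        System.out.println(p);
      }
    }
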
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Fri Aug  3 19:00:15 2012
@@ -32,6 +32,7 @@ import java.util.Set;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Time;
 
 /**
  * This class tests commands from Trash.
@@ -89,7 +90,7 @@ public class TestTrash extends TestCase 
    * @param base - the base path where files are created
    * @throws IOException
    */
-  protected static void trashShell(final FileSystem fs, final Path base)
+  public static void trashShell(final FileSystem fs, final Path base)
   throws IOException {
     Configuration conf = new Configuration();
     conf.set("fs.defaultFS", fs.getUri().toString());
@@ -600,7 +601,7 @@ public class TestTrash extends TestCase 
       
       writeFile(fs, myFile, 10);
       
-      start = System.currentTimeMillis();
+      start = Time.now();
       
       try {
         retVal = shell.run(args);
@@ -612,7 +613,7 @@ public class TestTrash extends TestCase 
       
       assertTrue(retVal == 0);
       
-      long iterTime = System.currentTimeMillis() - start;
+      long iterTime = Time.now() - start;
       // take median of the first 10 runs
       if(i<10) {
         if(i==0) {

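The System.currentTimeMillis() -> Time.now() substitution seen here
recurs through the rest of this commit. org.apache.hadoop.util.Time is
a thin facade that gives Hadoop a single seam for reading the wall
clock, roughly as sketched below (the real class may carry additional
methods, e.g. a monotonic variant):

    package org.apache.hadoop.util;

    // Sketch of the clock facade the new imports refer to.
    public final class Time {
      /** Current wall-clock time, identical in value to
       *  System.currentTimeMillis(), but replaceable in one place. */
      public static long now() {
        return System.currentTimeMillis();
      }
    }
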
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java Fri Aug  3 19:00:15 2012
@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Options.CreateOpts;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -121,7 +122,7 @@ public class LoadGenerator extends Confi
   private double [] writeProbs = {0.3333};
   private volatile int currentIndex = 0;
   long totalTime = 0;
-  private long startTime = System.currentTimeMillis()+10000;
+  private long startTime = Time.now()+10000;
   final static private int BLOCK_SIZE = 10;
   private ArrayList<String> files = new ArrayList<String>();  // a table of file names
   private ArrayList<String> dirs = new ArrayList<String>(); // a table of directory names
@@ -232,9 +233,9 @@ public class LoadGenerator extends Confi
      * the entire file */
     private void read() throws IOException {
       String fileName = files.get(r.nextInt(files.size()));
-      long startTime = System.currentTimeMillis();
+      long startTime = Time.now();
       InputStream in = fc.open(new Path(fileName));
-      executionTime[OPEN] += (System.currentTimeMillis()-startTime);
+      executionTime[OPEN] += (Time.now()-startTime);
       totalNumOfOps[OPEN]++;
       while (in.read(buffer) != -1) {}
       in.close();
@@ -254,9 +255,9 @@ public class LoadGenerator extends Confi
       double fileSize = 0;
       while ((fileSize = r.nextGaussian()+2)<=0) {}
       genFile(file, (long)(fileSize*BLOCK_SIZE));
-      long startTime = System.currentTimeMillis();
+      long startTime = Time.now();
       fc.delete(file, true);
-      executionTime[DELETE] += (System.currentTimeMillis()-startTime);
+      executionTime[DELETE] += (Time.now()-startTime);
       totalNumOfOps[DELETE]++;
     }
     
@@ -265,9 +266,9 @@ public class LoadGenerator extends Confi
      */
     private void list() throws IOException {
       String dirName = dirs.get(r.nextInt(dirs.size()));
-      long startTime = System.currentTimeMillis();
+      long startTime = Time.now();
       fc.listStatus(new Path(dirName));
-      executionTime[LIST] += (System.currentTimeMillis()-startTime);
+      executionTime[LIST] += (Time.now()-startTime);
       totalNumOfOps[LIST]++;
     }
   }
@@ -435,7 +436,7 @@ public class LoadGenerator extends Confi
     }
     
     if (r==null) {
-      r = new Random(System.currentTimeMillis()+hostHashCode);
+      r = new Random(Time.now()+hostHashCode);
     }
     
     return initFileDirTables();
@@ -571,7 +572,7 @@ public class LoadGenerator extends Confi
    */
   private void barrier() {
     long sleepTime;
-    while ((sleepTime = startTime - System.currentTimeMillis()) > 0) {
+    while ((sleepTime = startTime - Time.now()) > 0) {
       try {
         Thread.sleep(sleepTime);
       } catch (InterruptedException ex) {
@@ -583,20 +584,20 @@ public class LoadGenerator extends Confi
    * The file is filled with 'a'.
    */
   private void genFile(Path file, long fileSize) throws IOException {
-    long startTime = System.currentTimeMillis();
+    long startTime = Time.now();
     FSDataOutputStream out = fc.create(file,
         EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
         CreateOpts.createParent(), CreateOpts.bufferSize(4096),
         CreateOpts.repFac((short) 3));
-    executionTime[CREATE] += (System.currentTimeMillis()-startTime);
+    executionTime[CREATE] += (Time.now()-startTime);
     totalNumOfOps[CREATE]++;
 
     for (long i=0; i<fileSize; i++) {
       out.writeByte('a');
     }
-    startTime = System.currentTimeMillis();
+    startTime = Time.now();
     out.close();
-    executionTime[WRITE_CLOSE] += (System.currentTimeMillis()-startTime);
+    executionTime[WRITE_CLOSE] += (Time.now()-startTime);
     totalNumOfOps[WRITE_CLOSE]++;
   }
   

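Beyond the clock swap, these hunks show LoadGenerator's measurement
idiom: every FileContext call is bracketed by two clock reads, and the
delta is accumulated per operation type. The pattern in isolation
(OPEN, executionTime, and totalNumOfOps are the fields of the class
above); since these are wall-clock reads, routing them through one
facade is what later makes a monotonic clock a drop-in change:

    long t0 = Time.now();
    InputStream in = fc.open(new Path(fileName));  // measured op
    executionTime[OPEN] += Time.now() - t0;        // accumulate latency
    totalNumOfOps[OPEN]++;                         // and the op count
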
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java Fri Aug  3 19:00:15 2012
@@ -39,6 +39,7 @@ import java.util.TreeSet;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Time;
 
 /**
  * <p>
@@ -59,7 +60,7 @@ class InMemoryNativeFileSystemStore impl
   }
 
   public void storeEmptyFile(String key) throws IOException {
-    metadataMap.put(key, new FileMetadata(key, 0, System.currentTimeMillis()));
+    metadataMap.put(key, new FileMetadata(key, 0, Time.now()));
     dataMap.put(key, new byte[0]);
   }
 
@@ -81,7 +82,7 @@ class InMemoryNativeFileSystemStore impl
       }
     }
     metadataMap.put(key,
-        new FileMetadata(key, file.length(), System.currentTimeMillis()));
+        new FileMetadata(key, file.length(), Time.now()));
     dataMap.put(key, out.toByteArray());
   }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java Fri Aug  3 19:00:15 2012
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
+import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.viewfs.ChRootedFileSystem;
@@ -33,6 +34,7 @@ import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import static org.mockito.Mockito.*;
 
 public class TestChRootedFileSystem {
  FileSystem fSys; // The ChRootedFs
@@ -314,4 +316,37 @@ public class TestChRootedFileSystem {
   public void testResolvePathNonExisting() throws IOException {
       fSys.resolvePath(new Path("/nonExisting"));
   }
-}
+  
+  @Test
+  public void testDeleteOnExitPathHandling() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+        
+    URI chrootUri = URI.create("mockfs://foo/a/b");
+    ChRootedFileSystem chrootFs = new ChRootedFileSystem(chrootUri, conf);
+    FileSystem mockFs = ((FilterFileSystem)chrootFs.getRawFileSystem())
+        .getRawFileSystem();
+    
+    // ensure delete propagates the correct path
+    Path chrootPath = new Path("/c");
+    Path rawPath = new Path("/a/b/c");
+    chrootFs.delete(chrootPath, false);
+    verify(mockFs).delete(eq(rawPath), eq(false));
+    reset(mockFs);
+ 
+    // fake that the path exists for deleteOnExit
+    FileStatus stat = mock(FileStatus.class);
+    when(mockFs.getFileStatus(eq(rawPath))).thenReturn(stat);
+    // ensure deleteOnExit propagates the correct path
+    chrootFs.deleteOnExit(chrootPath);
+    chrootFs.close();
+    verify(mockFs).delete(eq(rawPath), eq(true));
+  }
+
+  static class MockFileSystem extends FilterFileSystem {
+    MockFileSystem() {
+      super(mock(FileSystem.class));
+    }
+    public void initialize(URI name, Configuration conf) throws IOException {}
+  }
+}
\ No newline at end of file

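The new test fixes the translation rule for a chrooted mount: with the
root at mockfs://foo/a/b, the client path /c must reach the underlying
filesystem as /a/b/c, both for an immediate delete() and for the
deferred delete(..., true) that deleteOnExit() triggers from close().
The translation is essentially a prefix join; an illustrative helper
(name hypothetical, not ChRootedFileSystem's actual method):

    // Prepend the chroot root to a path that is absolute within the
    // chrooted view, e.g. (/a/b, /c) -> /a/b/c.
    static Path fullPath(Path chrootRoot, Path p) {
      return new Path(chrootRoot, p.toUri().getPath().substring(1));
    }
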
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java Fri Aug  3 19:00:15 2012
@@ -23,6 +23,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.apache.zookeeper.data.Stat;
 import org.apache.zookeeper.server.ZooKeeperServer;
@@ -36,7 +37,7 @@ public abstract class ActiveStandbyElect
   public static void waitForActiveLockData(TestContext ctx,
       ZooKeeperServer zks, String parentDir, byte[] activeData)
       throws Exception {
-    long st = System.currentTimeMillis();
+    long st = Time.now();
     long lastPrint = st;
     while (true) {
       if (ctx != null) {
@@ -51,17 +52,17 @@ public abstract class ActiveStandbyElect
             Arrays.equals(activeData, data)) {
           return;
         }
-        if (System.currentTimeMillis() > lastPrint + LOG_INTERVAL_MS) {
+        if (Time.now() > lastPrint + LOG_INTERVAL_MS) {
           LOG.info("Cur data: " + StringUtils.byteToHexString(data));
-          lastPrint = System.currentTimeMillis();
+          lastPrint = Time.now();
         }
       } catch (NoNodeException nne) {
         if (activeData == null) {
           return;
         }
-        if (System.currentTimeMillis() > lastPrint + LOG_INTERVAL_MS) {
+        if (Time.now() > lastPrint + LOG_INTERVAL_MS) {
           LOG.info("Cur data: no node");
-          lastPrint = System.currentTimeMillis();
+          lastPrint = Time.now();
         }
       }
       Thread.sleep(50);

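waitForActiveLockData polls the znode every 50 ms and rate-limits its
progress logging through the lastPrint timestamp, so a long wait emits
one "Cur data" line per LOG_INTERVAL_MS rather than twenty per second.
The throttling idiom on its own (done() stands in for the real exit
condition):

    long lastPrint = Time.now();
    while (!done()) {
      if (Time.now() > lastPrint + LOG_INTERVAL_MS) {
        LOG.info("still waiting...");
        lastPrint = Time.now();   // open a new logging window
      }
      Thread.sleep(50);
    }
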
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java Fri Aug  3 19:00:15 2012
@@ -32,6 +32,7 @@ import java.util.concurrent.CountDownLat
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
+import org.apache.hadoop.util.Time;
 import org.apache.zookeeper.PortAssignment;
 import org.apache.zookeeper.TestableZooKeeper;
 import org.apache.zookeeper.WatchedEvent;
@@ -111,11 +112,11 @@ public abstract class ClientBaseWithFixe
             return connected;
         }
         synchronized void waitForConnected(long timeout) throws InterruptedException, TimeoutException {
-            long expire = System.currentTimeMillis() + timeout;
+            long expire = Time.now() + timeout;
             long left = timeout;
             while(!connected && left > 0) {
                 wait(left);
-                left = expire - System.currentTimeMillis();
+                left = expire - Time.now();
             }
             if (!connected) {
                 throw new TimeoutException("Did not connect");
@@ -123,11 +124,11 @@ public abstract class ClientBaseWithFixe
             }
         }
         synchronized void waitForDisconnected(long timeout) throws InterruptedException, TimeoutException {
-            long expire = System.currentTimeMillis() + timeout;
+            long expire = Time.now() + timeout;
             long left = timeout;
             while(connected && left > 0) {
                 wait(left);
-                left = expire - System.currentTimeMillis();
+                left = expire - Time.now();
             }
             if (connected) {
                 throw new TimeoutException("Did not disconnect");
@@ -248,7 +249,7 @@ public abstract class ClientBaseWithFixe
     }
 
     public static boolean waitForServerUp(String hp, long timeout) {
-        long start = System.currentTimeMillis();
+        long start = Time.now();
         while (true) {
             try {
                 // if there are multiple hostports, just take the first one
@@ -263,7 +264,7 @@ public abstract class ClientBaseWithFixe
                 LOG.info("server " + hp + " not up " + e);
             }
 
-            if (System.currentTimeMillis() > start + timeout) {
+            if (Time.now() > start + timeout) {
                 break;
             }
             try {
@@ -275,7 +276,7 @@ public abstract class ClientBaseWithFixe
         return false;
     }
     public static boolean waitForServerDown(String hp, long timeout) {
-        long start = System.currentTimeMillis();
+        long start = Time.now();
         while (true) {
             try {
                 HostPort hpobj = parseHostPortList(hp).get(0);
@@ -284,7 +285,7 @@ public abstract class ClientBaseWithFixe
                 return true;
             }
 
-            if (System.currentTimeMillis() > start + timeout) {
+            if (Time.now() > start + timeout) {
                 break;
             }
             try {

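waitForConnected and waitForDisconnected use the standard deadline loop
for guarded waits: compute an absolute expiry once, then recompute the
remaining budget after every wakeup, so spurious wakeups can never
stretch the timeout. The skeleton, with conditionMet() standing in for
the connected flag:

    synchronized void awaitCondition(long timeoutMs)
        throws InterruptedException, TimeoutException {
      long expire = Time.now() + timeoutMs;
      long left = timeoutMs;
      while (!conditionMet() && left > 0) {
        wait(left);                  // may wake early or spuriously
        left = expire - Time.now();  // recompute remaining budget
      }
      if (!conditionMet()) {
        throw new TimeoutException("condition not met within "
            + timeoutMs + " ms");
      }
    }
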
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java Fri Aug  3 19:00:15 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.ha.HealthMonitor.Callback;
 import org.apache.hadoop.ha.HealthMonitor.State;
+import org.apache.hadoop.util.Time;
 
 import org.junit.Before;
 import org.junit.Test;
@@ -136,8 +137,8 @@ public class TestHealthMonitor {
 
   private void waitForState(HealthMonitor hm, State state)
       throws InterruptedException {
-    long st = System.currentTimeMillis();
-    while (System.currentTimeMillis() - st < 2000) {
+    long st = Time.now();
+    while (Time.now() - st < 2000) {
       if (hm.getHealthState() == state) {
         return;
       }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java Fri Aug  3 19:00:15 2012
@@ -28,6 +28,7 @@ import org.apache.hadoop.ha.HAServicePro
 import org.apache.hadoop.ha.HealthMonitor.State;
 import org.apache.hadoop.ha.MiniZKFCCluster.DummyZKFC;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.log4j.Level;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooKeeper;
@@ -394,9 +395,9 @@ public class TestZKFailoverController ex
       // Ask it to cede active for 3 seconds. It should respond promptly
       // (i.e. the RPC itself should not take 3 seconds!)
       ZKFCProtocol proxy = zkfc.getLocalTarget().getZKFCProxy(conf, 5000);
-      long st = System.currentTimeMillis();
+      long st = Time.now();
       proxy.cedeActive(3000);
-      long et = System.currentTimeMillis();
+      long et = Time.now();
       assertTrue("RPC to cedeActive took " + (et - st) + " ms",
           et - st < 1000);
       
@@ -408,7 +409,7 @@ public class TestZKFailoverController ex
       // After the prescribed 3 seconds, should go into STANDBY state,
       // since the other node in the cluster would have taken ACTIVE.
       cluster.waitForElectorState(0, ActiveStandbyElector.State.STANDBY);
-      long et2 = System.currentTimeMillis();
+      long et2 = Time.now();
       assertTrue("Should take ~3 seconds to rejoin. Only took " + (et2 - et) +
           "ms before rejoining.",
           et2 - et > 2800);      

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java Fri Aug  3 19:00:15 2012
@@ -21,6 +21,7 @@ import java.util.Random;
 
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Time;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -61,11 +62,11 @@ public class TestZKFailoverControllerStr
   @Test(timeout=(STRESS_RUNTIME_SECS + EXTRA_TIMEOUT_SECS) * 1000)
   public void testExpireBackAndForth() throws Exception {
     cluster.start();
-    long st = System.currentTimeMillis();
+    long st = Time.now();
     long runFor = STRESS_RUNTIME_SECS * 1000;
 
     int i = 0;
-    while (System.currentTimeMillis() - st < runFor) {
+    while (Time.now() - st < runFor) {
       // flip flop the services back and forth
       int from = i % 2;
       int to = (i + 1) % 2;
@@ -87,11 +88,11 @@ public class TestZKFailoverControllerStr
   @Test(timeout=(STRESS_RUNTIME_SECS + EXTRA_TIMEOUT_SECS) * 1000)
   public void testRandomExpirations() throws Exception {
     cluster.start();
-    long st = System.currentTimeMillis();
+    long st = Time.now();
     long runFor = STRESS_RUNTIME_SECS * 1000;
 
     Random r = new Random();
-    while (System.currentTimeMillis() - st < runFor) {
+    while (Time.now() - st < runFor) {
       cluster.getTestContext().checkException();
       int targetIdx = r.nextInt(2);
       ActiveStandbyElector target = cluster.getElector(targetIdx);
@@ -125,8 +126,8 @@ public class TestZKFailoverControllerStr
     // setting up the mock.
     cluster.start();
     
-    long st = System.currentTimeMillis();
-    while (System.currentTimeMillis() - st < runFor) {
+    long st = Time.now();
+    while (Time.now() - st < runFor) {
       cluster.getTestContext().checkException();
       serverFactory.closeAll();
       Thread.sleep(50);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java Fri Aug  3 19:00:15 2012
@@ -29,6 +29,7 @@ import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 import org.mockito.Mockito;
 
@@ -152,4 +153,26 @@ public class TestIOUtils {
       }
     }
   }
+
+  @Test
+  public void testWrappedReadForCompressedData() throws IOException {
+    byte[] buf = new byte[2];
+    InputStream mockStream = Mockito.mock(InputStream.class);
+    Mockito.when(mockStream.read(buf, 0, 1)).thenReturn(1);
+    Mockito.when(mockStream.read(buf, 0, 2)).thenThrow(
+        new java.lang.InternalError());
+
+    try {
+      assertEquals("Check expected value", 1,
+          IOUtils.wrappedReadForCompressedData(mockStream, buf, 0, 1));
+    } catch (IOException ioe) {
+      fail("Unexpected error while reading");
+    }
+    try {
+      IOUtils.wrappedReadForCompressedData(mockStream, buf, 0, 2);
+    } catch (IOException ioe) {
+      GenericTestUtils.assertExceptionContains(
+          "Error while reading compressed data", ioe);
+    }
+  }
 }

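The new test covers both sides of wrappedReadForCompressedData's
contract: a plain read passes through untouched, while a Throwable
thrown below the stream (the mocked InternalError models what a native
decompressor can raise) surfaces as an IOException whose message starts
with "Error while reading compressed data". A sketch consistent with
what the assertions require (not necessarily the verbatim IOUtils
source):

    public static int wrappedReadForCompressedData(InputStream is,
        byte[] buf, int off, int len) throws IOException {
      try {
        return is.read(buf, off, len);
      } catch (IOException ioe) {
        throw ioe;                        // already the right type
      } catch (Throwable t) {             // e.g. InternalError from JNI
        throw new IOException("Error while reading compressed data", t);
      }
    }
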
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java Fri Aug  3 19:00:15 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.io.SequenceFile
 import org.apache.hadoop.io.SequenceFile.Metadata;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.conf.*;
 import org.mockito.Mockito;
@@ -557,6 +558,60 @@ public class TestSequenceFile extends Te
     // should succeed, fails if exception thrown
   }
 
+  public void testSerializationAvailability() throws IOException {
+    Configuration conf = new Configuration();
+    Path path = new Path(System.getProperty("test.build.data", "."),
+        "serializationAvailability");
+    // Check that missing serializers are reported.
+    try {
+      SequenceFile.createWriter(
+          conf,
+          SequenceFile.Writer.file(path),
+          SequenceFile.Writer.keyClass(String.class),
+          SequenceFile.Writer.valueClass(NullWritable.class));
+      // Note: This may also fail someday if JavaSerialization
+      // is activated by default.
+      fail("Must throw IOException for missing serializer for the Key class");
+    } catch (IOException e) {
+      assertTrue(e.getMessage().startsWith(
+        "Could not find a serializer for the Key class: '" +
+            String.class.getName() + "'."));
+    }
+    try {
+      SequenceFile.createWriter(
+          conf,
+          SequenceFile.Writer.file(path),
+          SequenceFile.Writer.keyClass(NullWritable.class),
+          SequenceFile.Writer.valueClass(String.class));
+      // Note: This may also fail someday if JavaSerialization
+      // is activated by default.
+      fail("Must throw IOException for missing serializer for the Value class");
+    } catch (IOException e) {
+      assertTrue(e.getMessage().startsWith(
+        "Could not find a serializer for the Value class: '" +
+            String.class.getName() + "'."));
+    }
+
+    // Write a simple file to test deserialization failures with
+    writeTest(FileSystem.get(conf), 1, 1, path, CompressionType.NONE, null);
+
+    // Remove Writable serializations, to enforce error.
+    conf.setStrings(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
+        AvroReflectSerialization.class.getName());
+
+    // Now check that a missing deserializer is reported.
+    try {
+      new SequenceFile.Reader(
+          conf,
+          SequenceFile.Reader.file(path));
+      fail("Must throw IOException for missing deserializer for the Key class");
+    } catch (IOException e) {
+      assertTrue(e.getMessage().startsWith(
+        "Could not find a deserializer for the Key class: '" +
+            RandomDatum.class.getName() + "'."));
+    }
+  }
+
   /** For debugging and testing. */
   public static void main(String[] args) throws Exception {
     int count = 1024 * 1024;

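testSerializationAvailability leans on SequenceFile's use of
SerializationFactory: the writer asks the factory for a Serializer for
the key and value classes, the reader for a Deserializer, and each
fails with the quoted "Could not find a ..." IOException when nothing
registered under io.serializations accepts the class. The lookup,
sketched as a fragment (error text abbreviated to the prefix the test
matches on):

    SerializationFactory factory = new SerializationFactory(conf);
    Serializer<String> keySerializer = factory.getSerializer(String.class);
    if (keySerializer == null) {
      throw new IOException(
          "Could not find a serializer for the Key class: '"
          + String.class.getName() + "'.");
    }
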
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1369164&r1=1369163&r2=1369164&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Fri Aug  3 19:00:15 2012
@@ -46,6 +46,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.RandomDatum;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
@@ -68,6 +69,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import org.junit.Assert;
+import org.junit.Assume;
 import org.junit.Test;
 import static org.junit.Assert.*;
 
@@ -514,6 +516,50 @@ public class TestCodec {
     LOG.info("SUCCESS! Completed SequenceFileCodecTest with codec \"" + codecClass + "\"");
   }
   
+  /**
+   * Regression test for HADOOP-8423: seeking in a block-compressed
+   * stream would not properly reset the block decompressor state.
+   */
+  @Test
+  public void testSnappyMapFile() throws Exception {
+    Assume.assumeTrue(SnappyCodec.isNativeCodeLoaded());
+    codecTestMapFile(SnappyCodec.class, CompressionType.BLOCK, 100);
+  }
+  
+  private void codecTestMapFile(Class<? extends CompressionCodec> clazz,
+      CompressionType type, int records) throws Exception {
+    
+    FileSystem fs = FileSystem.get(conf);
+    LOG.info("Creating MapFiles with " + records  + 
+            " records using codec " + clazz.getSimpleName());
+    Path path = new Path(new Path(
+        System.getProperty("test.build.data", "/tmp")),
+      clazz.getSimpleName() + "-" + type + "-" + records);
+
+    LOG.info("Writing " + path);
+    createMapFile(conf, fs, path, clazz.newInstance(), type, records);
+    MapFile.Reader reader = new MapFile.Reader(path, conf);
+    Text key1 = new Text("002");
+    assertNotNull(reader.get(key1, new Text()));
+    Text key2 = new Text("004");
+    assertNotNull(reader.get(key2, new Text()));
+  }
+  
+  private static void createMapFile(Configuration conf, FileSystem fs, Path path, 
+      CompressionCodec codec, CompressionType type, int records) throws IOException {
+    MapFile.Writer writer = 
+        new MapFile.Writer(conf, path,
+            MapFile.Writer.keyClass(Text.class),
+            MapFile.Writer.valueClass(Text.class),
+            MapFile.Writer.compression(type, codec));
+    Text key = new Text();
+    for (int j = 0; j < records; j++) {
+        key.set(String.format("%03d", j));
+        writer.append(key, key);
+    }
+    writer.close();
+  }
+
   public static void main(String[] args) throws IOException {
     int count = 10000;
     String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";