Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2013/10/30 23:22:15 UTC

svn commit: r1537330 [4/7] - in /hadoop/common/branches/YARN-321/hadoop-common-project: ./ hadoop-annotations/ hadoop-auth/ hadoop-common/ hadoop-common/dev-support/ hadoop-common/src/ hadoop-common/src/main/bin/ hadoop-common/src/main/conf/ hadoop-com...

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java Wed Oct 30 22:21:59 2013
@@ -59,4 +59,12 @@ public interface DNSToSwitchMapping {
    * will get a chance to see the new data.
    */
   public void reloadCachedMappings();
+  
+  /**
+   * Reload cached mappings on specific nodes.
+   *
+   * If there is a cache on these nodes, this method will clear it, so that 
+   * future accesses will see updated data.
+   */
+  public void reloadCachedMappings(List<String> names);
 }
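
A cache-backed implementation would typically honor the new overload by
evicting only the named hosts, so that the next resolve() refreshes just
those entries. A minimal sketch (the class and its cache field are
hypothetical, not part of this patch):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    /** Hypothetical mapper illustrating the new per-node reload contract. */
    public class CachingSwitchMapping implements DNSToSwitchMapping {
      private final Map<String, String> cache =
          new ConcurrentHashMap<String, String>();

      @Override
      public List<String> resolve(List<String> names) {
        List<String> racks = new ArrayList<String>(names.size());
        for (String name : names) {
          String rack = cache.get(name);
          if (rack == null) {
            rack = "/default-rack"; // placeholder for a real topology lookup
            cache.put(name, rack);
          }
          racks.add(rack);
        }
        return racks;
      }

      @Override
      public void reloadCachedMappings() {
        cache.clear(); // drop every cached entry
      }

      @Override
      public void reloadCachedMappings(List<String> names) {
        for (String name : names) {
          cache.remove(name); // evict only the named hosts
        }
      }
    }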

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java Wed Oct 30 22:21:59 2013
@@ -269,5 +269,11 @@ public final class ScriptBasedMapping ex
       // Nothing to do here, since RawScriptBasedMapping has no cache, and
       // does not inherit from CachedDNSToSwitchMapping
     }
+
+    @Override
+    public void reloadCachedMappings(List<String> names) {
+      // Nothing to do here, since RawScriptBasedMapping has no cache, and
+      // does not inherit from CachedDNSToSwitchMapping
+    }
   }
 }

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java Wed Oct 30 22:21:59 2013
@@ -162,5 +162,12 @@ public class TableMapping extends Cached
         }
       }
     }
+
+    @Override
+    public void reloadCachedMappings(List<String> names) {
+      // TableMapping has to reload all mappings at once, so there is no
+      // way to reload mappings for specific nodes only
+      reloadCachedMappings();
+    }
   }
 }

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Wed Oct 30 22:21:59 2013
@@ -45,11 +45,13 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.Server.Connection;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.token.SecretManager;
-import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+import org.apache.hadoop.security.token.TokenIdentifier;
 
 /**
  * A utility class for dealing with SASL on RPC server
@@ -267,13 +269,15 @@ public class SaslRpcServer {
       this.connection = connection;
     }
 
-    private char[] getPassword(TokenIdentifier tokenid) throws InvalidToken {
-      return encodePassword(secretManager.retrievePassword(tokenid));
+    private char[] getPassword(TokenIdentifier tokenid) throws InvalidToken,
+        StandbyException, RetriableException, IOException {
+      return encodePassword(secretManager.retriableRetrievePassword(tokenid));
     }
 
     @Override
     public void handle(Callback[] callbacks) throws InvalidToken,
-        UnsupportedCallbackException {
+        UnsupportedCallbackException, StandbyException, RetriableException,
+        IOException {
       NameCallback nc = null;
       PasswordCallback pc = null;
       AuthorizeCallback ac = null;
@@ -292,7 +296,8 @@ public class SaslRpcServer {
         }
       }
       if (pc != null) {
-        TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(), secretManager);
+        TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(),
+            secretManager);
         char[] password = getPassword(tokenIdentifier);
         UserGroupInformation user = null;
         user = tokenIdentifier.getUser(); // may throw exception

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Wed Oct 30 22:21:59 2013
@@ -31,6 +31,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -1313,7 +1314,14 @@ public class UserGroupInformation {
    * @return Credentials of tokens associated with this user
    */
   public synchronized Credentials getCredentials() {
-    return new Credentials(getCredentialsInternal());
+    Credentials creds = new Credentials(getCredentialsInternal());
+    Iterator<Token<?>> iter = creds.getAllTokens().iterator();
+    while (iter.hasNext()) {
+      if (iter.next() instanceof Token.PrivateToken) {
+        iter.remove();
+      }
+    }
+    return creds;
   }
   
   /**

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java Wed Oct 30 22:21:59 2013
@@ -53,6 +53,8 @@ public class FileBasedKeyStoresFactory i
     "ssl.{0}.keystore.location";
   public static final String SSL_KEYSTORE_PASSWORD_TPL_KEY =
     "ssl.{0}.keystore.password";
+  public static final String SSL_KEYSTORE_KEYPASSWORD_TPL_KEY =
+    "ssl.{0}.keystore.keypassword";
   public static final String SSL_KEYSTORE_TYPE_TPL_KEY =
     "ssl.{0}.keystore.type";
 
@@ -136,7 +138,7 @@ public class FileBasedKeyStoresFactory i
       conf.get(resolvePropertyName(mode, SSL_KEYSTORE_TYPE_TPL_KEY),
                DEFAULT_KEYSTORE_TYPE);
     KeyStore keystore = KeyStore.getInstance(keystoreType);
-    String keystorePassword = null;
+    String keystoreKeyPassword = null;
     if (requireClientCert || mode == SSLFactory.Mode.SERVER) {
       String locationProperty =
         resolvePropertyName(mode, SSL_KEYSTORE_LOCATION_TPL_KEY);
@@ -147,11 +149,17 @@ public class FileBasedKeyStoresFactory i
       }
       String passwordProperty =
         resolvePropertyName(mode, SSL_KEYSTORE_PASSWORD_TPL_KEY);
-      keystorePassword = conf.get(passwordProperty, "");
+      String keystorePassword = conf.get(passwordProperty, "");
       if (keystorePassword.isEmpty()) {
         throw new GeneralSecurityException("The property '" + passwordProperty +
           "' has not been set in the ssl configuration file.");
       }
+      String keyPasswordProperty =
+        resolvePropertyName(mode, SSL_KEYSTORE_KEYPASSWORD_TPL_KEY);
+      // Key password defaults to the same value as store password for
+      // compatibility with legacy configurations that did not use a separate
+      // configuration property for key password.
+      keystoreKeyPassword = conf.get(keyPasswordProperty, keystorePassword);
       LOG.debug(mode.toString() + " KeyStore: " + keystoreLocation);
 
       InputStream is = new FileInputStream(keystoreLocation);
@@ -167,8 +175,8 @@ public class FileBasedKeyStoresFactory i
     KeyManagerFactory keyMgrFactory = KeyManagerFactory
         .getInstance(SSLFactory.SSLCERTIFICATE);
       
-    keyMgrFactory.init(keystore, (keystorePassword != null) ?
-                                 keystorePassword.toCharArray() : null);
+    keyMgrFactory.init(keystore, (keystoreKeyPassword != null) ?
+                                 keystoreKeyPassword.toCharArray() : null);
     keyManagers = keyMgrFactory.getKeyManagers();
 
     //trust store
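
With this change an ssl-server.xml (or ssl-client.xml) can carry a key
password distinct from the store password; when
ssl.server.keystore.keypassword is absent, the store password is reused,
preserving legacy configurations. A sketch with illustrative values:

    <property>
      <name>ssl.server.keystore.password</name>
      <value>store-secret</value>
    </property>
    <property>
      <name>ssl.server.keystore.keypassword</name>
      <value>key-secret</value>
    </property>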

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java Wed Oct 30 22:21:59 2013
@@ -29,6 +29,7 @@ import javax.crypto.spec.SecretKeySpec;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.StandbyException;
 
 
@@ -66,7 +67,29 @@ public abstract class SecretManager<T ex
    * @return the password to use
    * @throws InvalidToken the token was invalid
    */
-  public abstract byte[] retrievePassword(T identifier) throws InvalidToken;
+  public abstract byte[] retrievePassword(T identifier)
+      throws InvalidToken;
+  
+  /**
+   * The same functionality as {@link #retrievePassword}, except that this
+   * method can throw a {@link RetriableException} or a {@link StandbyException}
+   * to indicate that the client can retry or fail over the same operation
+   * because of a temporary issue on the server side.
+   * 
+   * @param identifier the identifier to validate
+   * @return the password to use
+   * @throws InvalidToken the token was invalid
+   * @throws StandbyException the server is in standby state; the client can
+   *         try other servers
+   * @throws RetriableException the token was invalid, and the server thinks
+   *         this may be a temporary issue and suggests that the client retry
+   * @throws IOException to allow future exceptions to be added without breaking
+   *         compatibility        
+   */
+  public byte[] retriableRetrievePassword(T identifier)
+      throws InvalidToken, StandbyException, RetriableException, IOException {
+    return retrievePassword(identifier);
+  }
   
   /**
    * Create an empty token identifier.
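
A secret manager backed by replicated state can override the retriable
variant to signal transient unavailability instead of permanently rejecting
the token. A sketch under stated assumptions (the class and its synced flag
are hypothetical):

    import java.io.IOException;

    import org.apache.hadoop.ipc.RetriableException;
    import org.apache.hadoop.ipc.StandbyException;
    import org.apache.hadoop.security.token.SecretManager;
    import org.apache.hadoop.security.token.TokenIdentifier;

    /** Hypothetical manager whose token store may lag, e.g. after failover. */
    public class LaggingSecretManager<T extends TokenIdentifier>
        extends SecretManager<T> {

      private volatile boolean synced = false; // set once state is loaded

      @Override
      public byte[] retriableRetrievePassword(T identifier)
          throws InvalidToken, StandbyException, RetriableException,
          IOException {
        if (!synced) {
          // Ask the client to retry rather than failing the token outright.
          throw new RetriableException("Token store not yet synchronized");
        }
        return retrievePassword(identifier);
      }

      @Override
      public byte[] retrievePassword(T identifier) throws InvalidToken {
        throw new InvalidToken("token not found"); // placeholder lookup
      }

      @Override
      protected byte[] createPassword(T identifier) {
        return createPassword(identifier.getBytes(), generateSecret());
      }

      @Override
      public T createIdentifier() {
        return null; // placeholder; a real manager returns a concrete type
      }
    }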

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java Wed Oct 30 22:21:59 2013
@@ -19,31 +19,20 @@
 package org.apache.hadoop.security.token;
 
 import com.google.common.collect.Maps;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.ServiceLoader;
-
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-  
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import java.io.*;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.ServiceLoader;
+
 /**
  * The client-side form of the token.
  */
@@ -195,6 +184,19 @@ public class Token<T extends TokenIdenti
     service = newService;
   }
 
+  /**
+   * Marks a token as a clone. Used by the HA failover proxy to indicate
+   * that a token should not be visible to the user via
+   * UGI.getCredentials().
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public static class PrivateToken<T extends TokenIdentifier> extends Token<T> {
+    public PrivateToken(Token<T> token) {
+      super(token);
+    }
+  }
+
   @Override
   public void readFields(DataInput in) throws IOException {
     int len = WritableUtils.readVInt(in);
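
Together with the UserGroupInformation change above, this lets an HA
failover proxy keep a cloned token that still authenticates RPC but never
shows up in the user's credentials. A sketch (the helper class and method
are illustrative, not from this patch):

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    /** Hypothetical helper demonstrating the PrivateToken contract. */
    public class PrivateTokenExample {
      public static <T extends TokenIdentifier> void addHiddenClone(
          UserGroupInformation ugi, Token<T> token) {
        // The clone can still be selected for authentication, but the
        // getCredentials() filter in UserGroupInformation hides it.
        ugi.addToken(new Token.PrivateToken<T>(token));
      }
    }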

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenIdentifier.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenIdentifier.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenIdentifier.java Wed Oct 30 22:21:59 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.security.token
 import java.io.IOException;
 import java.util.Arrays;
 
+import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -35,6 +36,9 @@ import org.apache.hadoop.security.UserGr
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public abstract class TokenIdentifier implements Writable {
+
+  private String trackingId = null;
+
   /**
    * Get the token kind
    * @return the kind of the token
@@ -62,4 +66,19 @@ public abstract class TokenIdentifier im
     }
     return Arrays.copyOf(buf.getData(), buf.getLength());
   }
+
+  /**
+   * Returns a tracking identifier that can be used to associate usages of a
+   * token across multiple client sessions.
+   *
+   * Currently, this method just returns an MD5 hash of {@link #getBytes()}.
+   *
+   * @return tracking identifier
+   */
+  public String getTrackingId() {
+    if (trackingId == null) {
+      trackingId = DigestUtils.md5Hex(getBytes());
+    }
+    return trackingId;
+  }
 }
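
Because the tracking id is derived purely from the serialized identifier,
two sessions presenting the same token yield the same id, which is what
makes cross-session correlation possible. A sketch (the class name is
illustrative):

    import java.io.IOException;

    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    /** Hypothetical utility deriving the tracking id from a client token. */
    public class TrackingIdExample {
      /** Equal, by definition, to DigestUtils.md5Hex(id.getBytes()). */
      public static String trackingIdOf(Token<? extends TokenIdentifier> token)
          throws IOException {
        TokenIdentifier id = token.decodeIdentifier(); // null if kind unknown
        return (id == null) ? null : id.getTrackingId();
      }
    }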

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Wed Oct 30 22:21:59 2013
@@ -45,7 +45,7 @@ import org.apache.hadoop.util.Time;
 
 import com.google.common.base.Preconditions;
 
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "Hive"})
 @InterfaceStability.Evolving
 public abstract 
 class AbstractDelegationTokenSecretManager<TokenIdent 
@@ -86,6 +86,11 @@ extends AbstractDelegationTokenIdentifie
   private long tokenMaxLifetime;
   private long tokenRemoverScanInterval;
   private long tokenRenewInterval;
+  /**
+   * Whether to store a token's tracking ID in its TokenInformation.
+   * Subclasses may enable this by setting the field.
+   */
+  protected boolean storeTokenTrackingId;
   private Thread tokenRemoverThread;
   protected volatile boolean running;
 
@@ -102,6 +107,7 @@ extends AbstractDelegationTokenIdentifie
     this.tokenMaxLifetime = delegationTokenMaxLifetime;
     this.tokenRenewInterval = delegationTokenRenewInterval;
     this.tokenRemoverScanInterval = delegationTokenRemoverScanInterval;
+    this.storeTokenTrackingId = false;
   }
 
   /** should be called before this object is used */
@@ -201,7 +207,7 @@ extends AbstractDelegationTokenIdentifie
     }
     if (currentTokens.get(identifier) == null) {
       currentTokens.put(identifier, new DelegationTokenInformation(renewDate,
-          password));
+          password, getTrackingIdIfEnabled(identifier)));
     } else {
       throw new IOException(
           "Same delegation token being added twice.");
@@ -280,23 +286,48 @@ extends AbstractDelegationTokenIdentifie
     byte[] password = createPassword(identifier.getBytes(), currentKey.getKey());
     storeNewToken(identifier, now + tokenRenewInterval);
     currentTokens.put(identifier, new DelegationTokenInformation(now
-        + tokenRenewInterval, password));
+        + tokenRenewInterval, password, getTrackingIdIfEnabled(identifier)));
     return password;
   }
-
-  @Override
-  public synchronized byte[] retrievePassword(TokenIdent identifier)
+  
+  /**
+   * Find the DelegationTokenInformation for the given token id, and verify
+   * whether the token is expired. Note that this method should be called
+   * while holding the secret manager's monitor.
+   */
+  protected DelegationTokenInformation checkToken(TokenIdent identifier)
       throws InvalidToken {
+    assert Thread.holdsLock(this);
     DelegationTokenInformation info = currentTokens.get(identifier);
     if (info == null) {
       throw new InvalidToken("token (" + identifier.toString()
           + ") can't be found in cache");
     }
-    long now = Time.now();
-    if (info.getRenewDate() < now) {
+    if (info.getRenewDate() < Time.now()) {
       throw new InvalidToken("token (" + identifier.toString() + ") is expired");
     }
-    return info.getPassword();
+    return info;
+  }
+  
+  @Override
+  public synchronized byte[] retrievePassword(TokenIdent identifier)
+      throws InvalidToken {
+    return checkToken(identifier).getPassword();
+  }
+
+  protected String getTrackingIdIfEnabled(TokenIdent ident) {
+    if (storeTokenTrackingId) {
+      return ident.getTrackingId();
+    }
+    return null;
+  }
+
+  public synchronized String getTokenTrackingId(TokenIdent identifier) {
+    DelegationTokenInformation info = currentTokens.get(identifier);
+    if (info == null) {
+      return null;
+    }
+    return info.getTrackingId();
   }
 
   /**
@@ -359,8 +390,9 @@ extends AbstractDelegationTokenIdentifie
           + " is trying to renew a token with " + "wrong password");
     }
     long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
+    String trackingId = getTrackingIdIfEnabled(id);
     DelegationTokenInformation info = new DelegationTokenInformation(renewTime,
-        password);
+        password, trackingId);
 
     if (currentTokens.get(id) == null) {
       throw new InvalidToken("Renewal request for unknown token");
@@ -420,9 +452,17 @@ extends AbstractDelegationTokenIdentifie
   public static class DelegationTokenInformation {
     long renewDate;
     byte[] password;
+    String trackingId;
+
     public DelegationTokenInformation(long renewDate, byte[] password) {
+      this(renewDate, password, null);
+    }
+
+    public DelegationTokenInformation(long renewDate, byte[] password,
+        String trackingId) {
       this.renewDate = renewDate;
       this.password = password;
+      this.trackingId = trackingId;
     }
     /** returns renew date */
     public long getRenewDate() {
@@ -432,6 +472,10 @@ extends AbstractDelegationTokenIdentifie
     byte[] getPassword() {
       return password;
     }
+    /** returns tracking id */
+    public String getTrackingId() {
+      return trackingId;
+    }
   }
   
   /** Remove expired delegation tokens from cache */
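
Tracking ids are off by default; a subclass opts in by setting the new
protected flag, typically from configuration. A sketch (the class and the
configuration key are hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
    import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;

    /** Hypothetical subclass that records tracking ids for auditing. */
    public class AuditingSecretManager extends
        AbstractDelegationTokenSecretManager<AbstractDelegationTokenIdentifier> {

      public AuditingSecretManager(Configuration conf, long keyUpdateInterval,
          long maxLifetime, long renewInterval, long removerScanInterval) {
        super(keyUpdateInterval, maxLifetime, renewInterval, removerScanInterval);
        // "audit.token.tracking.enable" is an illustrative key, not a real one.
        this.storeTokenTrackingId =
            conf.getBoolean("audit.token.tracking.enable", false);
      }

      @Override
      public AbstractDelegationTokenIdentifier createIdentifier() {
        return new AbstractDelegationTokenIdentifier() {
          @Override
          public Text getKind() {
            return new Text("AUDIT_TOKEN"); // illustrative token kind
          }
        };
      }
    }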

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java Wed Oct 30 22:21:59 2013
@@ -154,7 +154,7 @@ public class ReflectionUtils {
    * @param stream the stream to
    * @param title a string title for the stack trace
    */
-  public static void printThreadInfo(PrintWriter stream,
+  public synchronized static void printThreadInfo(PrintWriter stream,
                                      String title) {
     final int STACK_DEPTH = 20;
     boolean contention = threadBean.isThreadContentionMonitoringEnabled();

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java Wed Oct 30 22:21:59 2013
@@ -48,6 +48,9 @@ public class VersionInfo {
     try {
       InputStream is = Thread.currentThread().getContextClassLoader()
         .getResourceAsStream(versionInfoFile);
+      if (is == null) {
+        throw new IOException("Resource not found");
+      }
       info.load(is);
     } catch (IOException ex) {
       LogFactory.getLog(getClass()).warn("Could not read '" + 

Propchange: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/native/native.sln
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj Wed Oct 30 22:21:59 2013
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf-8"?>
+<?xml version="1.0" encoding="utf-8"?>
 
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more

Propchange: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto Wed Oct 30 22:21:59 2013
@@ -60,8 +60,8 @@ message RequestHeaderProto {
    * ProtocolInfoProto) since they reuse the connection; in this case
    * the declaringClassProtocolName field is set to the ProtocolInfoProto
    */
-  required string declaringClassProtocolName = 3;
+  required string declaringClassProtocolName = 2;
   
   /** protocol version of class declaring the called method */
-  required uint64 clientProtocolVersion = 4;
+  required uint64 clientProtocolVersion = 3;
 }

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto Wed Oct 30 22:21:59 2013
@@ -62,7 +62,7 @@ message RpcRequestHeaderProto { // the h
 
   optional RpcKindProto rpcKind = 1;
   optional OperationProto rpcOp = 2;
-  required uint32 callId = 3; // a sequence number that is sent back in response
+  required sint32 callId = 3; // a sequence number that is sent back in response
   required bytes clientId = 4; // Globally unique client ID
   // clientId + callId uniquely identifies a request
   // retry count, 1 means this is the first retry

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Wed Oct 30 22:21:59 2013
@@ -492,6 +492,11 @@
   </description>
 </property>
 
+<property>
+  <name>fs.swift.impl</name>
+  <value>org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem</value>
+  <description>The implementation class of the OpenStack Swift Filesystem</description>
+</property>
 
 <property>
   <name>fs.automatic.close</name>
@@ -1219,4 +1224,19 @@
   </description>
 </property>
 
+<property>
+  <name>nfs3.server.port</name>
+  <value>2049</value>
+  <description>
+      Specify the port number used by Hadoop NFS.
+  </description>
+</property>
+
+<property>
+  <name>nfs3.mountd.port</name>
+  <value>4242</value>
+  <description>
+      Specify the port number used by Hadoop mount daemon.
+  </description>
+</property>
 </configuration>

Propchange: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm Wed Oct 30 22:21:59 2013
@@ -311,7 +311,7 @@ Hadoop MapReduce Next Generation - Clust
 | | | Only applicable if log-aggregation is enabled. |
 *-------------------------+-------------------------+------------------------+
 | <<<yarn.nodemanager.aux-services>>> | | |
-| | mapreduce.shuffle  | |
+| | mapreduce_shuffle  | |
 | | | Shuffle service that needs to be set for Map Reduce applications. |
 *-------------------------+-------------------------+------------------------+
 
@@ -854,8 +854,10 @@ KVNO Timestamp         Principal
 | | The container process has the same Unix user as the NodeManager.  |
 *--------------------------------------+--------------------------------------+
 | <<<LinuxContainerExecutor>>>               | |
-| | Supported only on GNU/Linux, this executor runs the containers as the |
-| | user who submitted the application. It requires all user accounts to be |
+| | Supported only on GNU/Linux, this executor runs the containers as either the |
+| | YARN user who submitted the application (when full security is enabled) or |
+| | as a dedicated user (defaults to nobody) when full security is not enabled. |
+| | When full security is enabled, this executor requires all user accounts to be |
 | | created on the cluster nodes where the containers are launched. It uses |
 | | a <setuid> executable that is included in the Hadoop distribution. |
 | | The NodeManager uses this executable to launch and kill containers. |
@@ -929,6 +931,8 @@ KVNO Timestamp         Principal
 *-------------------------+-------------------------+------------------------+
 | <<<banned.users>>> | hdfs,yarn,mapred,bin | Banned users. |
 *-------------------------+-------------------------+------------------------+
+| <<<allowed.system.users>>> | foo,bar | Allowed system users. |
+*-------------------------+-------------------------+------------------------+
 | <<<min.user.id>>> | 1000 | Prevent other super-users. |
 *-------------------------+-------------------------+------------------------+
 

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm Wed Oct 30 22:21:59 2013
@@ -381,17 +381,22 @@ rmr
 
 setrep
 
-   Usage: <<<hdfs dfs -setrep [-R] <path> >>>
+   Usage: <<<hdfs dfs -setrep [-R] [-w] <numReplicas> <path> >>>
 
-   Changes the replication factor of a file.
+   Changes the replication factor of a file. If <path> is a directory then
+   the command recursively changes the replication factor of all files under
+   the directory tree rooted at <path>.
 
    Options:
 
-     * The -R option will recursively increase the replication factor of files within a directory.
+     * The -w flag requests that the command wait for the replication
+       to complete. This can potentially take a very long time.
+
+     * The -R flag is accepted for backwards compatibility. It has no effect.
 
    Example:
 
-     * <<<hdfs dfs -setrep -w 3 -R /user/hadoop/dir1>>>
+     * <<<hdfs dfs -setrep -w 3 /user/hadoop/dir1>>>
 
    Exit Code:
 

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm Wed Oct 30 22:21:59 2013
@@ -140,7 +140,7 @@ Add the following configs to your <<<yar
 
   <property>
     <name>yarn.nodemanager.aux-services</name>
-    <value>mapreduce.shuffle</value>
+    <value>mapreduce_shuffle</value>
     <description>shuffle service that needs to be set for Map Reduce to run </description>
   </property>
 +---+

Propchange: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core:r1513206-1537326
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1531125

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java Wed Oct 30 22:21:59 2013
@@ -21,11 +21,10 @@ package org.apache.hadoop.cli;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.cli.util.*;
-import org.apache.hadoop.cli.util.CLITestCmd;
-import org.apache.hadoop.cli.util.CLICommand;
 import org.apache.hadoop.cli.util.CommandExecutor.Result;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -369,6 +368,7 @@ public class CLITestHelper {
     CLITestData td = null;
     ArrayList<CLICommand> testCommands = null;
     ArrayList<CLICommand> cleanupCommands = null;
+    boolean runOnWindows = true;
     
     @Override
     public void startDocument() throws SAXException {
@@ -399,6 +399,8 @@ public class CLITestHelper {
         throws SAXException {
       if (qName.equals("description")) {
         td.setTestDesc(charString);
+      } else if (qName.equals("windows")) {
+          runOnWindows = Boolean.parseBoolean(charString);
       } else if (qName.equals("test-commands")) {
         td.setTestCommands(testCommands);
         testCommands = null;
@@ -420,8 +422,11 @@ public class CLITestHelper {
       } else if (qName.equals("expected-output")) {
         comparatorData.setExpectedOutput(charString);
       } else if (qName.equals("test")) {
-        testsFromConfigFile.add(td);
+        if (!Shell.WINDOWS || runOnWindows) {
+          testsFromConfigFile.add(td);
+        }
         td = null;
+        runOnWindows = true;
       } else if (qName.equals("mode")) {
         testMode = charString;
         if (!testMode.equals(TESTMODE_NOCOMPARE) &&
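
The handler above recognizes an optional <windows> element inside each
<test> entry of the CLI test XML; tests marked false are skipped when
running on Windows. A sketch of such an entry (structure inferred from the
parser; commands and comparator values are illustrative):

    <test>
      <description>ls: POSIX-only path handling</description>
      <windows>false</windows>
      <test-commands>
        <command>-fs NAMENODE -ls /user</command>
      </test-commands>
      <cleanup-commands>
        <command>-fs NAMENODE -rm -r /user</command>
      </cleanup-commands>
      <comparators>
        <comparator>
          <type>RegexpComparator</type>
          <expected-output>^Found .*</expected-output>
        </comparator>
      </comparators>
    </test>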

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java Wed Oct 30 22:21:59 2013
@@ -27,6 +27,8 @@ import org.apache.hadoop.fs.FileSystem.S
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.util.concurrent.Uninterruptibles;
+
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
 
 /**
@@ -44,6 +46,38 @@ public abstract class FCStatisticsBaseTe
   //fc should be set appropriately by the deriving test.
   protected static FileContext fc = null;
   
+  @Test(timeout=60000)
+  public void testStatisticsOperations() throws Exception {
+    final Statistics stats = new Statistics("file");
+    Assert.assertEquals(0L, stats.getBytesRead());
+    Assert.assertEquals(0L, stats.getBytesWritten());
+    Assert.assertEquals(0, stats.getWriteOps());
+    stats.incrementBytesWritten(1000);
+    Assert.assertEquals(1000L, stats.getBytesWritten());
+    Assert.assertEquals(0, stats.getWriteOps());
+    stats.incrementWriteOps(123);
+    Assert.assertEquals(123, stats.getWriteOps());
+    
+    Thread thread = new Thread() {
+      @Override
+      public void run() {
+        stats.incrementWriteOps(1);
+      }
+    };
+    thread.start();
+    Uninterruptibles.joinUninterruptibly(thread);
+    Assert.assertEquals(124, stats.getWriteOps());
+    // Test copy constructor and reset function
+    Statistics stats2 = new Statistics(stats);
+    stats.reset();
+    Assert.assertEquals(0, stats.getWriteOps());
+    Assert.assertEquals(0L, stats.getBytesWritten());
+    Assert.assertEquals(0L, stats.getBytesRead());
+    Assert.assertEquals(124, stats2.getWriteOps());
+    Assert.assertEquals(1000L, stats2.getBytesWritten());
+    Assert.assertEquals(0L, stats2.getBytesRead());
+  }
+
   @Test
   public void testStatistics() throws IOException, URISyntaxException {
     URI fsUri = getFsUri();

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java Wed Oct 30 22:21:59 2013
@@ -944,14 +944,20 @@ public abstract class FSMainOperationsBa
       rename(src, dst, false, true, false, Rename.NONE);
       Assert.fail("Expected exception was not thrown");
     } catch (IOException e) {
-      Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException);
+      IOException ioException = unwrapException(e);
+      if (!(ioException instanceof FileNotFoundException)) {
+        throw ioException;
+      }
     }
 
     try {
       rename(src, dst, false, true, false, Rename.OVERWRITE);
       Assert.fail("Expected exception was not thrown");
     } catch (IOException e) {
-      Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException);
+      IOException ioException = unwrapException(e);
+      if (!(ioException instanceof FileNotFoundException)) {
+        throw ioException;
+      }
     }
   }
 

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java Wed Oct 30 22:21:59 2013
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.EnumSet;
+import java.util.NoSuchElementException;
 
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -30,7 +31,7 @@ import org.apache.hadoop.fs.Options.Rena
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.junit.After;
 import org.junit.Assert;
-import org.junit.Assume;
+import static org.junit.Assert.*;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -93,7 +94,7 @@ public abstract class FileContextMainOpe
     }     
   };
   
-  private static byte[] data = getFileData(numBlocks,
+  private static final byte[] data = getFileData(numBlocks,
       getDefaultBlockSize());
   
   @Before
@@ -108,7 +109,8 @@ public abstract class FileContextMainOpe
   
   @After
   public void tearDown() throws Exception {
-    fc.delete(new Path(fileContextTestHelper.getAbsoluteTestRootPath(fc), new Path("test")), true);
+    boolean del = fc.delete(new Path(fileContextTestHelper.getAbsoluteTestRootPath(fc), new Path("test")), true);
+    assertTrue(del);
     fc.delete(localFsRootPath, true);
   }
   
@@ -195,6 +197,14 @@ public abstract class FileContextMainOpe
     fc.setWorkingDirectory(absoluteDir);
     Assert.assertEquals(absoluteDir, fc.getWorkingDirectory());
 
+    Path aRegularFile = new Path("aRegularFile");
+    createFile(aRegularFile);
+    try {
+      fc.setWorkingDirectory(aRegularFile);
+      fail("An IOException expected.");
+    } catch (IOException ioe) {
+      // okay
+    }
   }
   
   @Test
@@ -633,20 +643,6 @@ public abstract class FileContextMainOpe
         filteredPaths));
   }
   
-  protected Path getHiddenPathForTest() {
-    return null;
-  }
-  
-  @Test
-  public void testGlobStatusFilterWithHiddenPathTrivialFilter()
-      throws Exception {
-    Path hidden = getHiddenPathForTest();
-    Assume.assumeNotNull(hidden);
-    FileStatus[] filteredPaths = fc.util().globStatus(hidden, DEFAULT_FILTER);
-    Assert.assertNotNull(filteredPaths);
-    Assert.assertEquals(1, filteredPaths.length);
-  }
-
   @Test
   public void testWriteReadAndDeleteEmptyFile() throws Exception {
     writeReadAndDelete(0);
@@ -1210,6 +1206,136 @@ public abstract class FileContextMainOpe
         return true;
       }
     return false;
+ }
+
+  @Test
+  public void testOpen2() throws IOException {
+    final Path rootPath = getTestRootPath(fc, "test");
+    //final Path rootPath = getAbsoluteTestRootPath(fc);
+    final Path path = new Path(rootPath, "zoo");
+    createFile(path);
+    final long length = fc.getFileStatus(path).getLen();
+    FSDataInputStream fsdis = fc.open(path, 2048);
+    try {
+      byte[] bb = new byte[(int)length];
+      fsdis.readFully(bb);
+      assertArrayEquals(data, bb);
+    } finally {
+      fsdis.close();
+    }
+  }
+
+  @Test
+  public void testSetVerifyChecksum() throws IOException {
+    final Path rootPath = getTestRootPath(fc, "test");
+    final Path path = new Path(rootPath, "zoo");
+
+    FSDataOutputStream out = fc.create(path, EnumSet.of(CREATE),
+        Options.CreateOpts.createParent());
+    try {
+      // instruct FS to verify checksum through the FileContext:
+      fc.setVerifyChecksum(true, path);
+      out.write(data, 0, data.length);
+    } finally {
+      out.close();
+    }
+
+    // NB: underlying FS may be different (this is an abstract test),
+    // so we cannot assert .zoo.crc existence.
+    // Instead, we check that the file is read correctly:
+    FileStatus fileStatus = fc.getFileStatus(path);
+    final long len = fileStatus.getLen();
+    assertTrue(len == data.length);
+    byte[] bb = new byte[(int)len];
+    FSDataInputStream fsdis = fc.open(path);
+    try {
+      fsdis.read(bb);
+    } finally {
+      fsdis.close();
+    }
+    assertArrayEquals(data, bb);
+  }
+
+  @Test
+  public void testListCorruptFileBlocks() throws IOException {
+    final Path rootPath = getTestRootPath(fc, "test");
+    final Path path = new Path(rootPath, "zoo");
+    createFile(path);
+    try {
+      final RemoteIterator<Path> remoteIterator = fc
+          .listCorruptFileBlocks(path);
+      if (listCorruptedBlocksSupported()) {
+        assertTrue(remoteIterator != null);
+        Path p;
+        while (remoteIterator.hasNext()) {
+          p = remoteIterator.next();
+          System.out.println("corrupted block: " + p);
+        }
+        try {
+          remoteIterator.next();
+          fail();
+        } catch (NoSuchElementException nsee) {
+          // okay
+        }
+      } else {
+        fail();
+      }
+    } catch (UnsupportedOperationException uoe) {
+      if (listCorruptedBlocksSupported()) {
+        fail(uoe.toString());
+      } else {
+        // okay
+      }
+    }
+  }
+
+  protected abstract boolean listCorruptedBlocksSupported();
+
+  @Test
+  public void testDeleteOnExitUnexisting() throws IOException {
+    final Path rootPath = getTestRootPath(fc, "test");
+    final Path path = new Path(rootPath, "zoo");
+    boolean registered = fc.deleteOnExit(path);
+    // because "zoo" does not exist:
+    assertTrue(!registered);
+  }
+
+  @Test
+  public void testFileContextStatistics() throws IOException {
+    FileContext.clearStatistics();
+
+    final Path rootPath = getTestRootPath(fc, "test");
+    final Path path = new Path(rootPath, "zoo");
+    createFile(path);
+    byte[] bb = new byte[data.length];
+    FSDataInputStream fsdis = fc.open(path);
+    try {
+      fsdis.read(bb);
+    } finally {
+      fsdis.close();
+    }
+    assertArrayEquals(data, bb);
+
+    FileContext.printStatistics();
+  }
+
+  @Test
+  /*
+   * Test method
+   *  org.apache.hadoop.fs.FileContext.getFileContext(AbstractFileSystem)
+   */
+  public void testGetFileContext1() throws IOException {
+    final Path rootPath = getTestRootPath(fc, "test");
+    AbstractFileSystem asf = fc.getDefaultFileSystem();
+    // create FileContext using the protected #getFileContext(1) method:
+    FileContext fc2 = FileContext.getFileContext(asf);
+    // Now just check that this context can do something reasonable:
+    final Path path = new Path(rootPath, "zoo");
+    FSDataOutputStream out = fc2.create(path, EnumSet.of(CREATE),
+        Options.CreateOpts.createParent());
+    out.close();
+    Path pathResolved = fc2.resolvePath(path);
+    assertEquals(pathResolved.toUri().getPath(), path.toUri().getPath());
   }
   
   private Path getTestRootPath(FileContext fc, String pathString) {

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java Wed Oct 30 22:21:59 2013
@@ -35,6 +35,7 @@ import org.junit.Test;
 
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 /**
  * <p>
@@ -174,6 +175,13 @@ public abstract class FileContextPermiss
         System.out.println("Not testing changing the group since user " +
                            "belongs to only one group.");
       }
+      
+      try {
+        fc.setOwner(f, null, null);
+        fail("Exception expected.");
+      } catch (IllegalArgumentException iae) {
+        // okay
+      }
     } 
     finally {cleanupFile(fc, f);}
   }

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java Wed Oct 30 22:21:59 2013
@@ -270,7 +270,7 @@ public abstract class FileSystemContract
   
   public void testWriteInNonExistentDirectory() throws IOException {
     Path path = path("/test/hadoop/file");
-    assertFalse("Parent doesn't exist", fs.exists(path.getParent()));
+    assertFalse("Parent exists", fs.exists(path.getParent()));
     createFile(path);
     
     assertTrue("Exists", fs.exists(path));
@@ -280,7 +280,7 @@ public abstract class FileSystemContract
 
   public void testDeleteNonExistentFile() throws IOException {
     Path path = path("/test/hadoop/file");    
-    assertFalse("Doesn't exist", fs.exists(path));
+    assertFalse("Path exists: " + path, fs.exists(path));
     assertFalse("No deletion", fs.delete(path, true));
   }
   

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java Wed Oct 30 22:21:59 2013
@@ -24,6 +24,8 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URI;
 import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,15 +34,20 @@ import java.util.List;
 import java.util.jar.Attributes;
 import java.util.jar.JarFile;
 import java.util.jar.Manifest;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.tools.tar.TarEntry;
+import org.apache.tools.tar.TarOutputStream;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
+import static org.junit.Assert.*;
 
 public class TestFileUtil {
   private static final Log LOG = LogFactory.getLog(TestFileUtil.class);
@@ -48,14 +55,14 @@ public class TestFileUtil {
   private static final String TEST_ROOT_DIR = System.getProperty(
       "test.build.data", "/tmp") + "/fu";
   private static final File TEST_DIR = new File(TEST_ROOT_DIR);
-  private static String FILE = "x";
-  private static String LINK = "y";
-  private static String DIR = "dir";
-  private File del = new File(TEST_DIR, "del");
-  private File tmp = new File(TEST_DIR, "tmp");
-  private File dir1 = new File(del, DIR + "1");
-  private File dir2 = new File(del, DIR + "2");
-  private File partitioned = new File(TEST_DIR, "partitioned");
+  private static final String FILE = "x";
+  private static final String LINK = "y";
+  private static final String DIR = "dir";
+  private final File del = new File(TEST_DIR, "del");
+  private final File tmp = new File(TEST_DIR, "tmp");
+  private final File dir1 = new File(del, DIR + "1");
+  private final File dir2 = new File(del, DIR + "2");
+  private final File partitioned = new File(TEST_DIR, "partitioned");
 
   /**
    * Creates multiple directories for testing.
@@ -116,17 +123,17 @@ public class TestFileUtil {
    * @param contents String non-null file contents.
    * @throws IOException if an I/O error occurs.
    */
-  private void createFile(File directory, String name, String contents)
+  private File createFile(File directory, String name, String contents)
       throws IOException {
     File newFile = new File(directory, name);
     PrintWriter pw = new PrintWriter(newFile);
-
     try {
       pw.println(contents);
     }
     finally {
       pw.close();
     }
+    return newFile;
   }
 
   @Test (timeout = 30000)
@@ -553,14 +560,283 @@ public class TestFileUtil {
    * @throws IOException
    */
   @Test (timeout = 30000)
-  public void testGetDU() throws IOException {
+  public void testGetDU() throws Exception {
     setupDirs();
 
     long du = FileUtil.getDU(TEST_DIR);
     // Only two files (in partitioned).  Each has 3 characters + system-specific
     // line separator.
-    long expected = 2 * (3 + System.getProperty("line.separator").length());
+    final long expected = 2 * (3 + System.getProperty("line.separator").length());
     Assert.assertEquals(expected, du);
+    
+    // target file does not exist:
+    final File doesNotExist = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
+    long duDoesNotExist = FileUtil.getDU(doesNotExist);
+    assertEquals(0, duDoesNotExist);
+    
+    // target file is not a directory:
+    File notADirectory = new File(partitioned, "part-r-00000");
+    long duNotADirectoryActual = FileUtil.getDU(notADirectory);
+    long duNotADirectoryExpected = 3 + System.getProperty("line.separator").length();
+    assertEquals(duNotADirectoryExpected, duNotADirectoryActual);
+    
+    try {
+      // one of target files is not accessible, but the containing directory
+      // is accessible:
+      try {
+        FileUtil.chmod(notADirectory.getAbsolutePath(), "0000");
+      } catch (InterruptedException ie) {
+        // should never happen, since FileUtil.chmod never throws InterruptedException.
+        fail("Unexpected InterruptedException: " + ie);
+      }
+      assertFalse(notADirectory.canRead());
+      final long du3 = FileUtil.getDU(partitioned);
+      assertEquals(expected, du3);
+
+      // some target files and containing directory are not accessible:
+      try {
+        FileUtil.chmod(partitioned.getAbsolutePath(), "0000");
+      } catch (InterruptedException ie) {
+        // should never happen, since FileUtil.chmod never throws InterruptedException.
+        fail("Unexpected InterruptedException: " + ie);
+      }
+      assertFalse(partitioned.canRead());
+      final long du4 = FileUtil.getDU(partitioned);
+      assertEquals(0, du4);
+    } finally {
+      // Restore the permissions so that we can delete the folder 
+      // in @After method:
+      FileUtil.chmod(partitioned.getAbsolutePath(), "0777", true/*recursive*/);
+    }
+  }
+  
+  @Test (timeout = 30000)
+  public void testUnTar() throws IOException {
+    setupDirs();
+    
+    // make a simple tar:
+    final File simpleTar = new File(del, FILE);
+    OutputStream os = new FileOutputStream(simpleTar); 
+    TarOutputStream tos = new TarOutputStream(os);
+    try {
+      TarEntry te = new TarEntry("foo");
+      byte[] data = "some-content".getBytes("UTF-8");
+      te.setSize(data.length);
+      tos.putNextEntry(te);
+      tos.write(data);
+      tos.closeEntry();
+      tos.flush();
+      tos.finish();
+    } finally {
+      tos.close();
+    }
+
+    // successfully untar it into an existing dir:
+    FileUtil.unTar(simpleTar, tmp);
+    // check result:
+    assertTrue(new File(tmp, "foo").exists());
+    assertEquals(12, new File(tmp, "foo").length());
+    
+    final File regularFile = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
+    regularFile.createNewFile();
+    assertTrue(regularFile.exists());
+    try {
+      FileUtil.unTar(simpleTar, regularFile);
+      assertTrue("An IOException expected.", false);
+    } catch (IOException ioe) {
+      // okay
+    }
+  }
+  
+  @Test (timeout = 30000)
+  public void testReplaceFile() throws IOException {
+    setupDirs();
+    final File srcFile = new File(tmp, "src");
+    
+    // src exists, and target does not exist:
+    srcFile.createNewFile();
+    assertTrue(srcFile.exists());
+    final File targetFile = new File(tmp, "target");
+    assertFalse(targetFile.exists());
+    FileUtil.replaceFile(srcFile, targetFile);
+    assertFalse(srcFile.exists());
+    assertTrue(targetFile.exists());
+
+    // src exists and target is a regular file: 
+    srcFile.createNewFile();
+    assertTrue(srcFile.exists());
+    FileUtil.replaceFile(srcFile, targetFile);
+    assertFalse(srcFile.exists());
+    assertTrue(targetFile.exists());
+    
+    // src exists, and target is a non-empty directory: 
+    srcFile.createNewFile();
+    assertTrue(srcFile.exists());
+    targetFile.delete();
+    targetFile.mkdirs();
+    File obstacle = new File(targetFile, "obstacle");
+    obstacle.createNewFile();
+    assertTrue(obstacle.exists());
+    assertTrue(targetFile.exists() && targetFile.isDirectory());
+    try {
+      FileUtil.replaceFile(srcFile, targetFile);
+      fail("Expected an IOException.");
+    } catch (IOException ioe) {
+      // okay
+    }
+    // check the post-condition: nothing was deleted:
+    assertTrue(srcFile.exists());
+    assertTrue(targetFile.exists() && targetFile.isDirectory());
+    assertTrue(obstacle.exists());
+  }
+  
+  @Test (timeout = 30000)
+  public void testCreateLocalTempFile() throws IOException {
+    setupDirs();
+    final File baseFile = new File(tmp, "base");
+    File tmp1 = FileUtil.createLocalTempFile(baseFile, "foo", false);
+    File tmp2 = FileUtil.createLocalTempFile(baseFile, "foo", true);
+    assertFalse(tmp1.getAbsolutePath().equals(baseFile.getAbsolutePath()));
+    assertFalse(tmp2.getAbsolutePath().equals(baseFile.getAbsolutePath()));
+    assertTrue(tmp1.exists() && tmp2.exists());
+    assertTrue(tmp1.canWrite() && tmp2.canWrite());
+    assertTrue(tmp1.canRead() && tmp2.canRead());
+    tmp1.delete();
+    tmp2.delete();
+    assertTrue(!tmp1.exists() && !tmp2.exists());
+  }
+  
+  @Test (timeout = 30000)
+  public void testUnZip() throws IOException {
+    setupDirs();
+    
+    // make a simple zip containing a single
+    // entry "foo" with known content:
+    final File simpleZip = new File(del, FILE);
+    OutputStream os = new FileOutputStream(simpleZip); 
+    ZipOutputStream zos = new ZipOutputStream(os);
+    try {
+      ZipEntry ze = new ZipEntry("foo");
+      byte[] data = "some-content".getBytes("UTF-8");
+      ze.setSize(data.length);
+      zos.putNextEntry(ze);
+      zos.write(data);
+      zos.closeEntry();
+      zos.flush();
+      zos.finish();
+    } finally {
+      zos.close();
+    }
+    
+    // successfully unzip it into an existing dir:
+    FileUtil.unZip(simpleZip, tmp);
+    // check result:
+    assertTrue(new File(tmp, "foo").exists());
+    assertEquals(12, new File(tmp, "foo").length());
+    
+    final File regularFile = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
+    regularFile.createNewFile();
+    assertTrue(regularFile.exists());
+    try {
+      FileUtil.unZip(simpleZip, regularFile);
+      assertTrue("An IOException expected.", false);
+    } catch (IOException ioe) {
+      // okay
+    }
+  }  
+  
+  /*
+   * Test method copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf).
+   */
+  @Test (timeout = 30000)
+  public void testCopy5() throws IOException {
+    setupDirs();
+    
+    URI uri = tmp.toURI();
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.newInstance(uri, conf);
+    final String content = "some-content";
+    File srcFile = createFile(tmp, "src", content);
+    Path srcPath = new Path(srcFile.toURI());
+    
+    // copy regular file:
+    final File dest = new File(del, "dest");
+    boolean result = FileUtil.copy(fs, srcPath, dest, false, conf);
+    assertTrue(result);
+    assertTrue(dest.exists());
+    assertEquals(content.getBytes().length 
+        + System.getProperty("line.separator").getBytes().length, dest.length());
+    assertTrue(srcFile.exists()); // should not be deleted
+    
+    // copy regular file, delete src:
+    dest.delete();
+    assertFalse(dest.exists());
+    result = FileUtil.copy(fs, srcPath, dest, true, conf);
+    assertTrue(result);
+    assertTrue(dest.exists());
+    assertEquals(content.getBytes().length 
+        + System.getProperty("line.separator").getBytes().length, dest.length());
+    assertFalse(srcFile.exists()); // should be deleted
+    
+    // copy a dir:
+    dest.delete();
+    assertFalse(dest.exists());
+    srcPath = new Path(partitioned.toURI());
+    result = FileUtil.copy(fs, srcPath, dest, true, conf);
+    assertTrue(result);
+    assertTrue(dest.exists() && dest.isDirectory());
+    File[] files = dest.listFiles();
+    assertNotNull(files);
+    assertEquals(2, files.length);
+    for (File f: files) {
+      assertEquals(3 
+          + System.getProperty("line.separator").getBytes().length, f.length());
+    }
+    assertFalse(partitioned.exists()); // should be deleted
+  }  
+
+  @Test (timeout = 30000)
+  public void testStat2Paths1() {
+    assertNull(FileUtil.stat2Paths(null));
+    
+    FileStatus[] fileStatuses = new FileStatus[0]; 
+    Path[] paths = FileUtil.stat2Paths(fileStatuses);
+    assertEquals(0, paths.length);
+    
+    Path path1 = new Path("file://foo");
+    Path path2 = new Path("file://moo");
+    fileStatuses = new FileStatus[] { 
+        new FileStatus(3, false, 0, 0, 0, path1), 
+        new FileStatus(3, false, 0, 0, 0, path2) 
+        };
+    paths = FileUtil.stat2Paths(fileStatuses);
+    assertEquals(2, paths.length);
+    assertEquals(path1, paths[0]);
+    assertEquals(path2, paths[1]);
+  }
+  
+  @Test (timeout = 30000)
+  public void testStat2Paths2()  {
+    Path defaultPath = new Path("file://default");
+    Path[] paths = FileUtil.stat2Paths(null, defaultPath);
+    assertEquals(1, paths.length);
+    assertEquals(defaultPath, paths[0]);
+
+    paths = FileUtil.stat2Paths(null, null);
+    assertNotNull(paths);
+    assertEquals(1, paths.length);
+    assertNull(paths[0]);
+    
+    Path path1 = new Path("file://foo");
+    Path path2 = new Path("file://moo");
+    FileStatus[] fileStatuses = new FileStatus[] { 
+        new FileStatus(3, false, 0, 0, 0, path1), 
+        new FileStatus(3, false, 0, 0, 0, path2) 
+        };
+    paths = FileUtil.stat2Paths(fileStatuses, defaultPath);
+    assertEquals(2, paths.length);
+    assertEquals(path1, paths[0]);
+    assertEquals(path2, paths[1]);
   }
 
   @Test (timeout = 30000)
@@ -757,7 +1033,7 @@ public class TestFileUtil {
     String wildcardPath = tmp.getCanonicalPath() + File.separator + "*";
     String nonExistentSubdir = tmp.getCanonicalPath() + Path.SEPARATOR + "subdir"
       + Path.SEPARATOR;
-    List<String> classPaths = Arrays.asList("cp1.jar", "cp2.jar", wildcardPath,
+    List<String> classPaths = Arrays.asList("", "cp1.jar", "cp2.jar", wildcardPath,
       "cp3.jar", nonExistentSubdir);
     String inputClassPath = StringUtils.join(File.pathSeparator, classPaths);
     String classPathJar = FileUtil.createJarWithClassPath(inputClassPath,
@@ -776,20 +1052,32 @@ public class TestFileUtil {
       Assert.assertNotNull(classPathAttr);
       List<String> expectedClassPaths = new ArrayList<String>();
       for (String classPath: classPaths) {
+        if (classPath.length() == 0) {
+          continue;
+        }
         if (wildcardPath.equals(classPath)) {
           // add wildcard matches
           for (File wildcardMatch: wildcardMatches) {
             expectedClassPaths.add(wildcardMatch.toURI().toURL()
               .toExternalForm());
           }
-        } else if (nonExistentSubdir.equals(classPath)) {
-          // expect to maintain trailing path separator if present in input, even
-          // if directory doesn't exist yet
-          expectedClassPaths.add(new File(classPath).toURI().toURL()
-            .toExternalForm() + Path.SEPARATOR);
         } else {
-          expectedClassPaths.add(new File(classPath).toURI().toURL()
-            .toExternalForm());
+          File fileCp = null;
+          if (!new Path(classPath).isAbsolute()) {
+            fileCp = new File(tmp, classPath);
+          }
+          else {
+            fileCp = new File(classPath);
+          }
+          if (nonExistentSubdir.equals(classPath)) {
+            // expect to maintain trailing path separator if present in input, even
+            // if directory doesn't exist yet
+            expectedClassPaths.add(fileCp.toURI().toURL()
+              .toExternalForm() + Path.SEPARATOR);
+          } else {
+            expectedClassPaths.add(fileCp.toURI().toURL()
+              .toExternalForm());
+          }
         }
       }
       List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java Wed Oct 30 22:21:59 2013
@@ -19,7 +19,9 @@
 package org.apache.hadoop.fs;
 
 import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
 
+import java.io.File;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -106,7 +108,7 @@ public class TestFsShellCopy {  
     Path targetDir = new Path(testRoot, "target");    
     Path filePath = new Path(testRoot, new Path("srcFile"));
     lfs.create(filePath).close();
-    checkPut(filePath, targetDir);
+    checkPut(filePath, targetDir, false);
   }
 
   @Test
@@ -119,10 +121,42 @@ public class TestFsShellCopy {  
     Path dirPath = new Path(testRoot, new Path("srcDir"));
     lfs.mkdirs(dirPath);
     lfs.create(new Path(dirPath, "srcFile")).close();
-    checkPut(dirPath, targetDir);
+    checkPut(dirPath, targetDir, false);
   }
+
+  @Test
+  public void testCopyFileFromWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    String windowsTestRootPath = new File(testRootDir.toUri().getPath())
+        .getAbsolutePath();
+    Path testRoot = new Path(windowsTestRootPath, "testPutFile");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path targetDir = new Path(testRoot, "target");
+    Path filePath = new Path(testRoot, new Path("srcFile"));
+    lfs.create(filePath).close();
+    checkPut(filePath, targetDir, true);
+  }
+
+  @Test
+  public void testCopyDirFromWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    String windowsTestRootPath = new File(testRootDir.toUri().getPath())
+        .getAbsolutePath();
+    Path testRoot = new Path(windowsTestRootPath, "testPutDir");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path targetDir = new Path(testRoot, "target");
+    Path dirPath = new Path(testRoot, new Path("srcDir"));
+    lfs.mkdirs(dirPath);
+    lfs.create(new Path(dirPath, "srcFile")).close();
+    checkPut(dirPath, targetDir, true);
+  }
+
   
-  private void checkPut(Path srcPath, Path targetDir)
+  private void checkPut(Path srcPath, Path targetDir, boolean useWindowsPath)
   throws Exception {
     lfs.delete(targetDir, true);
     lfs.mkdirs(targetDir);    
@@ -134,37 +168,37 @@ public class TestFsShellCopy {  
     
     // copy to new file, then again
     prepPut(dstPath, false, false);
-    checkPut(0, srcPath, dstPath);
+    checkPut(0, srcPath, dstPath, useWindowsPath);
     if (lfs.isFile(srcPath)) {
-      checkPut(1, srcPath, dstPath);
+      checkPut(1, srcPath, dstPath, useWindowsPath);
     } else { // directory works because it copies into the dir
       // clear contents so the check won't think there are extra paths
       prepPut(dstPath, true, true);
-      checkPut(0, srcPath, dstPath);
+      checkPut(0, srcPath, dstPath, useWindowsPath);
     }
 
     // copy to non-existent subdir
     prepPut(childPath, false, false);
-    checkPut(1, srcPath, dstPath);
+    checkPut(1, srcPath, dstPath, useWindowsPath);
 
     // copy into dir, then with another name
     prepPut(dstPath, true, true);
-    checkPut(0, srcPath, dstPath);
+    checkPut(0, srcPath, dstPath, useWindowsPath);
     prepPut(childPath, true, true);
-    checkPut(0, srcPath, childPath);
+    checkPut(0, srcPath, childPath, useWindowsPath);
 
     // try to put to pwd with existing dir
     prepPut(targetDir, true, true);
-    checkPut(0, srcPath, null);
+    checkPut(0, srcPath, null, useWindowsPath);
     prepPut(targetDir, true, true);
-    checkPut(0, srcPath, new Path("."));
+    checkPut(0, srcPath, new Path("."), useWindowsPath);
 
     // try to put to pwd with non-existent cwd
     prepPut(dstPath, false, true);
     lfs.setWorkingDirectory(dstPath);
-    checkPut(1, srcPath, null);
+    checkPut(1, srcPath, null, useWindowsPath);
     prepPut(dstPath, false, true);
-    checkPut(1, srcPath, new Path("."));
+    checkPut(1, srcPath, new Path("."), useWindowsPath);
   }
 
   private void prepPut(Path dst, boolean create,
@@ -183,12 +217,17 @@ public class TestFsShellCopy {  
     }
   }
   
-  private void checkPut(int exitCode, Path src, Path dest) throws Exception {
+  private void checkPut(int exitCode, Path src, Path dest,
+      boolean useWindowsPath) throws Exception {
     String argv[] = null;
+    String srcPath = src.toString();
+    if (useWindowsPath) {
+      srcPath = (new File(srcPath)).getAbsolutePath();
+    }
     if (dest != null) {
-      argv = new String[]{ "-put", src.toString(), pathAsString(dest) };
+      argv = new String[]{ "-put", srcPath, pathAsString(dest) };
     } else {
-      argv = new String[]{ "-put", src.toString() };
+      argv = new String[]{ "-put", srcPath };
       dest = new Path(Path.CUR_DIR);
     }
     
@@ -418,6 +457,34 @@ public class TestFsShellCopy {  
     assertTrue(lfs.exists(srcDir));
   }
   
+  @Test
+  public void testMoveFromWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    Path testRoot = new Path(testRootDir, "testPutFile");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path target = new Path(testRoot, "target");
+    Path srcFile = new Path(testRoot, new Path("srcFile"));
+    lfs.createNewFile(srcFile);
+
+    String winSrcFile = new File(srcFile.toUri().getPath())
+        .getAbsolutePath();
+    shellRun(0, "-moveFromLocal", winSrcFile, target.toString());
+    assertFalse(lfs.exists(srcFile));
+    assertTrue(lfs.exists(target));
+    assertTrue(lfs.isFile(target));
+  }
+
+  @Test
+  public void testGetWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    String winDstFile = new File(dstPath.toUri().getPath())
+        .getAbsolutePath();
+    shellRun(0, "-get", srcPath.toString(), winDstFile);
+    checkPath(dstPath, false);
+  }
+  
   private void createFile(Path ... paths) throws IOException {
     for (Path path : paths) {
       FSDataOutputStream out = lfs.create(path);
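
Note on the new Windows-path tests above: all of them derive a platform-absolute local path from a Hadoop Path with the same java.io.File round-trip. A minimal standalone sketch of that conversion (hypothetical class and method names, for illustration only):

import java.io.File;

import org.apache.hadoop.fs.Path;

public class WindowsPathSketch {
  // Path.toUri().getPath() yields a forward-slash path string;
  // wrapping it in java.io.File and calling getAbsolutePath()
  // applies the platform's separator and drive conventions, so on
  // Windows this produces a back-slashed local path.
  public static String toLocalAbsolutePath(Path p) {
    return new File(p.toUri().getPath()).getAbsolutePath();
  }
}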

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java Wed Oct 30 22:21:59 2013
@@ -488,25 +488,6 @@ public class TestFsShellReturnCode {
       }
       return stat;
     }
-
-    @Override
-    public FileStatus getFileLinkStatus(Path p) throws IOException {
-      String f = makeQualified(p).toString();
-      FileStatus stat = super.getFileLinkStatus(p);
-      
-      stat.getPermission();
-      if (owners.containsKey(f)) {
-        stat.setOwner("STUB-"+owners.get(f));      
-      } else {
-        stat.setOwner("REAL-"+stat.getOwner());
-      }
-      if (groups.containsKey(f)) {
-        stat.setGroup("STUB-"+groups.get(f));      
-      } else {
-        stat.setGroup("REAL-"+stat.getGroup());
-      }
-      return stat;
-    }
   }
 
   /**

Modified: hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java Wed Oct 30 22:21:59 2013
@@ -18,14 +18,155 @@
 
 package org.apache.hadoop.fs;
 
-import java.io.IOException;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Progressable;
 import org.junit.Assert;
-import static org.junit.Assert.*;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.EnumSet;
+import java.util.Iterator;
+
+import static org.apache.hadoop.fs.Options.ChecksumOpt;
+import static org.apache.hadoop.fs.Options.CreateOpts;
+import static org.apache.hadoop.fs.Options.Rename;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+@SuppressWarnings("deprecation")
 public class TestHarFileSystem {
+  public static final Log LOG = LogFactory.getLog(TestHarFileSystem.class);
+
+  /**
+   * FileSystem methods that must not be overridden by
+   * {@link HarFileSystem}, either because a suitable default implementation
+   * is already available or because the method is not relevant to HAR.
+   */
+  @SuppressWarnings("deprecation")
+  private interface MustNotImplement {
+    public BlockLocation[] getFileBlockLocations(Path p, long start, long len);
+    public long getLength(Path f);
+    public FSDataOutputStream append(Path f, int bufferSize);
+    public void rename(Path src, Path dst, Rename... options);
+    public boolean exists(Path f);
+    public boolean isDirectory(Path f);
+    public boolean isFile(Path f);
+    public boolean createNewFile(Path f);
+
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+        boolean overwrite, int bufferSize, short replication, long blockSize,
+        Progressable progress) throws IOException;
+
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
+        Progressable progress) throws IOException;
+
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
+        Progressable progress, ChecksumOpt checksumOpt);
+
+    public boolean mkdirs(Path f);
+    public FSDataInputStream open(Path f);
+    public FSDataOutputStream create(Path f);
+    public FSDataOutputStream create(Path f, boolean overwrite);
+    public FSDataOutputStream create(Path f, Progressable progress);
+    public FSDataOutputStream create(Path f, short replication);
+    public FSDataOutputStream create(Path f, short replication,
+        Progressable progress);
+
+    public FSDataOutputStream create(Path f, boolean overwrite,
+        int bufferSize);
+
+    public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
+        Progressable progress);
+
+    public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
+        short replication, long blockSize);
+
+    public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
+        short replication, long blockSize, Progressable progress);
+
+    public FSDataOutputStream create(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication,
+        long blockSize, Progressable progress) throws IOException;
+
+    public FSDataOutputStream create(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication,
+        long blockSize, Progressable progress, ChecksumOpt checksumOpt)
+        throws IOException;
+
+    public String getName();
+    public boolean delete(Path f);
+    public short getReplication(Path src);
+    public void processDeleteOnExit();
+    public ContentSummary getContentSummary(Path f);
+    public FsStatus getStatus();
+    public FileStatus[] listStatus(Path f, PathFilter filter);
+    public FileStatus[] listStatus(Path[] files);
+    public FileStatus[] listStatus(Path[] files, PathFilter filter);
+    public FileStatus[] globStatus(Path pathPattern);
+    public FileStatus[] globStatus(Path pathPattern, PathFilter filter);
+
+    public Iterator<LocatedFileStatus> listFiles(Path path,
+        boolean isRecursive);
+
+    public Iterator<LocatedFileStatus> listLocatedStatus(Path f);
+    public Iterator<LocatedFileStatus> listLocatedStatus(Path f,
+        PathFilter filter);
+    public void copyFromLocalFile(Path src, Path dst);
+    public void moveFromLocalFile(Path[] srcs, Path dst);
+    public void moveFromLocalFile(Path src, Path dst);
+    public void copyToLocalFile(Path src, Path dst);
+    public void copyToLocalFile(boolean delSrc, Path src, Path dst,
+        boolean useRawLocalFileSystem);
+    public void moveToLocalFile(Path src, Path dst);
+    public long getBlockSize(Path f);
+    public FSDataOutputStream primitiveCreate(Path f,
+        EnumSet<CreateFlag> createFlag, CreateOpts... opts);
+    public void primitiveMkdir(Path f, FsPermission absolutePermission,
+        boolean createParent);
+    public int getDefaultPort();
+    public String getCanonicalServiceName();
+    public Token<?> getDelegationToken(String renewer) throws IOException;
+    public boolean deleteOnExit(Path f) throws IOException;
+    public boolean cancelDeleteOnExit(Path f) throws IOException;
+    public Token<?>[] addDelegationTokens(String renewer, Credentials creds)
+        throws IOException;
+    public Path fixRelativePart(Path p);
+    public void concat(Path trg, Path [] psrcs) throws IOException;
+    public FSDataOutputStream primitiveCreate(Path f,
+        FsPermission absolutePermission, EnumSet<CreateFlag> flag, int bufferSize,
+        short replication, long blockSize, Progressable progress,
+        ChecksumOpt checksumOpt) throws IOException;
+    public boolean primitiveMkdir(Path f, FsPermission absolutePermission)
+        throws IOException;
+    public RemoteIterator<Path> listCorruptFileBlocks(Path path)
+        throws IOException;
+    public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
+        throws IOException;
+    public void createSymlink(Path target, Path link, boolean createParent)
+        throws IOException;
+    public FileStatus getFileLinkStatus(Path f) throws IOException;
+    public boolean supportsSymlinks();
+    public Path getLinkTarget(Path f) throws IOException;
+    public Path resolveLink(Path f) throws IOException;
+    public void setVerifyChecksum(boolean verifyChecksum);
+    public void setWriteChecksum(boolean writeChecksum);
+    public Path createSnapshot(Path path, String snapshotName) throws
+        IOException;
+    public void renameSnapshot(Path path, String snapshotOldName,
+        String snapshotNewName) throws IOException;
+    public void deleteSnapshot(Path path, String snapshotName)
+        throws IOException;
+  }
+
   @Test
   public void testHarUri() {
     final Configuration conf = new Configuration();
@@ -44,8 +185,7 @@ public class TestHarFileSystem {
       p.getFileSystem(conf);
       Assert.fail(p + " is an invalid path.");
     } catch (IOException e) {
-      System.out.println("GOOD: Got an exception.");
-      e.printStackTrace(System.out);
+      // Expected
     }
   }
 
@@ -133,6 +273,37 @@ public class TestHarFileSystem {
       assertEquals(b[1].getOffset(), 128);
       assertEquals(b[1].getLength(), 384);
     }
+  }
 
+  @Test
+  public void testInheritedMethodsImplemented() throws Exception {
+    int errors = 0;
+    for (Method m : FileSystem.class.getDeclaredMethods()) {
+      if (Modifier.isStatic(m.getModifiers()) ||
+          Modifier.isPrivate(m.getModifiers()) ||
+          Modifier.isFinal(m.getModifiers())) {
+        continue;
+      }
+
+      try {
+        MustNotImplement.class.getMethod(m.getName(), m.getParameterTypes());
+        try {
+          HarFileSystem.class.getDeclaredMethod(m.getName(), m.getParameterTypes());
+          LOG.error("HarFileSystem MUST not implement " + m);
+          errors++;
+        } catch (NoSuchMethodException ex) {
+          // Expected
+        }
+      } catch (NoSuchMethodException exc) {
+        try {
+          HarFileSystem.class.getDeclaredMethod(m.getName(), m.getParameterTypes());
+        } catch (NoSuchMethodException exc2) {
+          LOG.error("HarFileSystem MUST implement " + m);
+          errors++;
+        }
+      }
+    }
+    assertTrue((errors + " methods were not overridden correctly - see log"),
+        errors <= 0);
   }
 }
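
Note on testInheritedMethodsImplemented above: the test enforces a two-sided contract via reflection; HarFileSystem must not override anything declared in MustNotImplement, and must override every other inheritable FileSystem method. A condensed, self-contained sketch of the same pattern, with hypothetical Base, Derived and Forbidden types standing in for FileSystem, HarFileSystem and MustNotImplement:

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

public class OverrideAuditSketch {
  // Hypothetical stand-ins: Derived must override open() but must
  // leave close() inherited from Base.
  interface Forbidden { void close(); }
  static class Base {
    public void close() {}
    public void open() {}
  }
  static class Derived extends Base {
    @Override public void open() {}
  }

  // Returns the number of contract violations, mirroring the errors
  // counter in testInheritedMethodsImplemented.
  public static int audit() {
    int errors = 0;
    for (Method m : Base.class.getDeclaredMethods()) {
      if (Modifier.isStatic(m.getModifiers()) ||
          Modifier.isPrivate(m.getModifiers()) ||
          Modifier.isFinal(m.getModifiers())) {
        continue;
      }
      boolean forbidden = hasMethod(Forbidden.class, m);
      boolean overridden = declaresMethod(Derived.class, m);
      if (forbidden && overridden) {
        errors++; // Derived overrides a method it must inherit as-is
      } else if (!forbidden && !overridden) {
        errors++; // Derived fails to override a required method
      }
    }
    return errors;
  }

  private static boolean hasMethod(Class<?> c, Method m) {
    try {
      c.getMethod(m.getName(), m.getParameterTypes());
      return true;
    } catch (NoSuchMethodException e) {
      return false;
    }
  }

  private static boolean declaresMethod(Class<?> c, Method m) {
    try {
      c.getDeclaredMethod(m.getName(), m.getParameterTypes());
      return true;
    } catch (NoSuchMethodException e) {
      return false;
    }
  }

  public static void main(String[] args) {
    System.out.println("violations: " + audit()); // prints 0
  }
}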