Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2013/10/16 23:07:35 UTC

svn commit: r1532910 [2/3] - in /hadoop/common/branches/HDFS-2832/hadoop-common-project: hadoop-common/ hadoop-common/src/main/bin/ hadoop-common/src/main/conf/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/a...

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java Wed Oct 16 21:07:28 2013
@@ -28,25 +28,41 @@ import org.apache.hadoop.fs.CommonConfig
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class HttpConfig {
-  private static boolean sslEnabled;
+  private static Policy policy;
+  public enum Policy {
+    HTTP_ONLY,
+    HTTPS_ONLY;
+
+    public static Policy fromString(String value) {
+      if (value.equalsIgnoreCase(CommonConfigurationKeysPublic
+              .HTTP_POLICY_HTTPS_ONLY)) {
+        return HTTPS_ONLY;
+      }
+      return HTTP_ONLY;
+    }
+  }
 
   static {
     Configuration conf = new Configuration();
-    sslEnabled = conf.getBoolean(
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
-        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
+    boolean sslEnabled = conf.getBoolean(
+            CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+            CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
+    policy = sslEnabled ? Policy.HTTPS_ONLY : Policy.HTTP_ONLY;
   }
 
-  public static void setSecure(boolean secure) {
-    sslEnabled = secure;
+  public static void setPolicy(Policy policy) {
+    HttpConfig.policy = policy;
   }
 
   public static boolean isSecure() {
-    return sslEnabled;
+    return policy == Policy.HTTPS_ONLY;
   }
 
   public static String getSchemePrefix() {
     return (isSecure()) ? "https://" : "http://";
   }
 
+  public static String getScheme(Policy policy) {
+    return policy == Policy.HTTPS_ONLY ? "https://" : "http://";
+  }
 }
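
A rough usage sketch of the new API (illustration only, not part of this commit;
it assumes CommonConfigurationKeysPublic.HTTP_POLICY_HTTPS_ONLY holds the string
"HTTPS_ONLY", and the host name is made up):

    // Resolve and install a policy, then build a URL prefix from it.
    HttpConfig.Policy p = HttpConfig.Policy.fromString("HTTPS_ONLY");
    HttpConfig.setPolicy(p);
    assert HttpConfig.isSecure();
    String url = HttpConfig.getScheme(p) + "namenode.example.com:50470/jmx";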

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Wed Oct 16 21:07:28 2013
@@ -341,6 +341,7 @@ public class HttpServer implements Filte
       }
       listener.setHost(bindAddress);
       listener.setPort(port);
+      LOG.info("SSL is enabled on " + toString());
     } else {
       listenerStartedExternally = true;
       listener = connector;

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java Wed Oct 16 21:07:28 2013
@@ -34,6 +34,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.net.ConnectTimeoutException;
 
@@ -531,6 +532,15 @@ public class RetryPolicies {
       this.maxDelayBase = maxDelayBase;
     }
 
+    /**
+     * @return 0 if this is the first failover/retry (i.e., retry immediately);
+     *         otherwise an exponentially growing sleep time
+     */
+    private long getFailoverOrRetrySleepTime(int times) {
+      return times == 0 ? 0 : 
+        calculateExponentialTime(delayMillis, times, maxDelayBase);
+    }
+    
     @Override
     public RetryAction shouldRetry(Exception e, int retries,
         int failovers, boolean isIdempotentOrAtMostOnce) throws Exception {
@@ -546,11 +556,8 @@ public class RetryPolicies {
           e instanceof StandbyException ||
           e instanceof ConnectTimeoutException ||
           isWrappedStandbyException(e)) {
-        return new RetryAction(
-            RetryAction.RetryDecision.FAILOVER_AND_RETRY,
-            // retry immediately if this is our first failover, sleep otherwise
-            failovers == 0 ? 0 :
-                calculateExponentialTime(delayMillis, failovers, maxDelayBase));
+        return new RetryAction(RetryAction.RetryDecision.FAILOVER_AND_RETRY,
+            getFailoverOrRetrySleepTime(failovers));
       } else if (e instanceof SocketException ||
                  (e instanceof IOException && !(e instanceof RemoteException))) {
         if (isIdempotentOrAtMostOnce) {
@@ -561,8 +568,14 @@ public class RetryPolicies {
               "whether it was invoked");
         }
       } else {
-        return fallbackPolicy.shouldRetry(e, retries, failovers,
-            isIdempotentOrAtMostOnce);
+        RetriableException re = getWrappedRetriableException(e);
+        if (re != null) {
+          return new RetryAction(RetryAction.RetryDecision.RETRY,
+              getFailoverOrRetrySleepTime(retries));
+        } else {
+          return fallbackPolicy.shouldRetry(e, retries, failovers,
+              isIdempotentOrAtMostOnce);
+        }
       }
     }
     
@@ -596,4 +609,14 @@ public class RetryPolicies {
         StandbyException.class);
     return unwrapped instanceof StandbyException;
   }
+  
+  private static RetriableException getWrappedRetriableException(Exception e) {
+    if (!(e instanceof RemoteException)) {
+      return null;
+    }
+    Exception unwrapped = ((RemoteException)e).unwrapRemoteException(
+        RetriableException.class);
+    return unwrapped instanceof RetriableException ? 
+        (RetriableException) unwrapped : null;
+  }
 }
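
In short, the new helper makes the first failover or retry immediate and backs off
exponentially afterwards, and a RemoteException wrapping a RetriableException is
now retried instead of being handed straight to the fallback policy. A hedged
sketch of building such a policy (the numeric values are arbitrary examples, and
it assumes the four-argument failoverOnNetworkException overload):

    // Fallback handles anything that is neither failover- nor retry-worthy.
    RetryPolicy policy = RetryPolicies.failoverOnNetworkException(
        RetryPolicies.TRY_ONCE_THEN_FAIL,
        15,       // maxFailovers
        500L,     // delayMillis: base for the exponential backoff
        15000L);  // maxDelayBase: cap on the computed sleep time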

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Wed Oct 16 21:07:28 2013
@@ -1295,6 +1295,29 @@ public abstract class Server {
       }
     }
 
+    private Throwable getCauseForInvalidToken(IOException e) {
+      Throwable cause = e;
+      while (cause != null) {
+        if (cause instanceof RetriableException) {
+          return (RetriableException) cause;
+        } else if (cause instanceof StandbyException) {
+          return (StandbyException) cause;
+        } else if (cause instanceof InvalidToken) {
+          // FIXME: hadoop method signatures are restricting the SASL
+          // callbacks to only returning InvalidToken, but some services
+          // need to throw other exceptions (ex. NN + StandbyException),
+          // so for now we'll tunnel the real exceptions via an
+          // InvalidToken's cause which normally is not set 
+          if (cause.getCause() != null) {
+            cause = cause.getCause();
+          }
+          return cause;
+        }
+        cause = cause.getCause();
+      }
+      return e;
+    }
+    
     private void saslProcess(RpcSaslProto saslMessage)
         throws WrappedRpcServerException, IOException, InterruptedException {
       if (saslContextEstablished) {
@@ -1307,29 +1330,11 @@ public abstract class Server {
         try {
           saslResponse = processSaslMessage(saslMessage);
         } catch (IOException e) {
-          IOException sendToClient = e;
-          Throwable cause = e;
-          while (cause != null) {
-            if (cause instanceof InvalidToken) {
-              // FIXME: hadoop method signatures are restricting the SASL
-              // callbacks to only returning InvalidToken, but some services
-              // need to throw other exceptions (ex. NN + StandyException),
-              // so for now we'll tunnel the real exceptions via an
-              // InvalidToken's cause which normally is not set 
-              if (cause.getCause() != null) {
-                cause = cause.getCause();
-              }
-              sendToClient = (IOException) cause;
-              break;
-            }
-            cause = cause.getCause();
-          }
           rpcMetrics.incrAuthenticationFailures();
-          String clientIP = this.toString();
           // attempting user could be null
-          AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser +
-            " (" + e.getLocalizedMessage() + ")");
-          throw sendToClient;
+          AUDITLOG.warn(AUTH_FAILED_FOR + this.toString() + ":"
+              + attemptingUser + " (" + e.getLocalizedMessage() + ")");
+          throw (IOException) getCauseForInvalidToken(e);
         }
         
         if (saslServer != null && saslServer.isComplete()) {
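
The extracted getCauseForInvalidToken() keeps the tunnelling convention the FIXME
describes: since the SASL callback signatures historically only admit InvalidToken,
a service can attach the real exception as the InvalidToken's cause and the
connection code above unwraps it. A hypothetical service-side sketch (not from
this commit; the message text is made up):

    // Tunnel a StandbyException through InvalidToken so the client sees the
    // real reason and can fail over instead of treating the token as bad.
    InvalidToken wrapper = new InvalidToken("cannot validate token in standby state");
    wrapper.initCause(new StandbyException("operation not supported in standby state"));
    throw wrapper;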

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Wed Oct 16 21:07:28 2013
@@ -45,11 +45,13 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.Server.Connection;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.token.SecretManager;
-import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+import org.apache.hadoop.security.token.TokenIdentifier;
 
 /**
  * A utility class for dealing with SASL on RPC server
@@ -267,13 +269,15 @@ public class SaslRpcServer {
       this.connection = connection;
     }
 
-    private char[] getPassword(TokenIdentifier tokenid) throws InvalidToken {
-      return encodePassword(secretManager.retrievePassword(tokenid));
+    private char[] getPassword(TokenIdentifier tokenid) throws InvalidToken,
+        StandbyException, RetriableException, IOException {
+      return encodePassword(secretManager.retriableRetrievePassword(tokenid));
     }
 
     @Override
     public void handle(Callback[] callbacks) throws InvalidToken,
-        UnsupportedCallbackException {
+        UnsupportedCallbackException, StandbyException, RetriableException,
+        IOException {
       NameCallback nc = null;
       PasswordCallback pc = null;
       AuthorizeCallback ac = null;
@@ -292,7 +296,8 @@ public class SaslRpcServer {
         }
       }
       if (pc != null) {
-        TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(), secretManager);
+        TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(),
+            secretManager);
         char[] password = getPassword(tokenIdentifier);
         UserGroupInformation user = null;
         user = tokenIdentifier.getUser(); // may throw exception

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Wed Oct 16 21:07:28 2013
@@ -33,6 +33,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -1325,7 +1326,14 @@ public class UserGroupInformation {
    * @return Credentials of tokens associated with this user
    */
   public synchronized Credentials getCredentials() {
-    return new Credentials(getCredentialsInternal());
+    Credentials creds = new Credentials(getCredentialsInternal());
+    Iterator<Token<?>> iter = creds.getAllTokens().iterator();
+    while (iter.hasNext()) {
+      if (iter.next() instanceof Token.PrivateToken) {
+        iter.remove();
+      }
+    }
+    return creds;
   }
   
   /**

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java Wed Oct 16 21:07:28 2013
@@ -29,6 +29,7 @@ import javax.crypto.spec.SecretKeySpec;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.ipc.RetriableException;
 import org.apache.hadoop.ipc.StandbyException;
 
 
@@ -66,7 +67,29 @@ public abstract class SecretManager<T ex
    * @return the password to use
    * @throws InvalidToken the token was invalid
    */
-  public abstract byte[] retrievePassword(T identifier) throws InvalidToken;
+  public abstract byte[] retrievePassword(T identifier)
+      throws InvalidToken;
+  
+  /**
+   * Same functionality as {@link #retrievePassword}, except that this
+   * method can throw a {@link RetriableException} or a {@link StandbyException}
+   * to indicate that the client can retry or fail over the same operation
+   * because of a temporary issue on the server side.
+   * 
+   * @param identifier the identifier to validate
+   * @return the password to use
+   * @throws InvalidToken the token was invalid
+   * @throws StandbyException the server is in standby state, the client can
+   *         try other servers
+   * @throws RetriableException the token was invalid, and the server thinks
+   *         this may be a temporary issue and suggests that the client retry
+   * @throws IOException to allow future exceptions to be added without breaking
+   *         compatibility        
+   */
+  public byte[] retriableRetrievePassword(T identifier)
+      throws InvalidToken, StandbyException, RetriableException, IOException {
+    return retrievePassword(identifier);
+  }
   
   /**
    * Create an empty token identifier.
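
A hypothetical subclass sketch of the new hook (the MyTokenIdent type and the
isStarted() check are assumptions, not part of this commit): the default simply
delegates to retrievePassword, but an implementation can signal a transient
condition instead of rejecting the token outright.

    @Override
    public byte[] retriableRetrievePassword(MyTokenIdent id)
        throws InvalidToken, StandbyException, RetriableException, IOException {
      if (!isStarted()) {   // hypothetical "still warming up" check
        throw new RetriableException("secret manager not yet started");
      }
      return retrievePassword(id);   // fall back to the strict lookup
    }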

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java Wed Oct 16 21:07:28 2013
@@ -19,31 +19,20 @@
 package org.apache.hadoop.security.token;
 
 import com.google.common.collect.Maps;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.ServiceLoader;
-
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-  
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import java.io.*;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.ServiceLoader;
+
 /**
  * The client-side form of the token.
  */
@@ -195,6 +184,19 @@ public class Token<T extends TokenIdenti
     service = newService;
   }
 
+  /**
+   * Marks a token as a private clone. Used by the HA failover proxy to
+   * indicate that a token should not be visible to the user via
+   * UGI.getCredentials().
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public static class PrivateToken<T extends TokenIdentifier> extends Token<T> {
+    public PrivateToken(Token<T> token) {
+      super(token);
+    }
+  }
+
   @Override
   public void readFields(DataInput in) throws IOException {
     int len = WritableUtils.readVInt(in);
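
Taken together with the UserGroupInformation change above, a PrivateToken added
to a UGI is hidden from getCredentials(). A minimal sketch (illustration only;
the kind and service names are made up and the token bytes are empty):

    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    Token<TokenIdentifier> logical = new Token<TokenIdentifier>(
        new byte[0], new byte[0], new Text("TEST_KIND"), new Text("ha-hdfs:ns1"));
    Token<TokenIdentifier> physical = new Token<TokenIdentifier>(
        new byte[0], new byte[0], new Text("TEST_KIND"), new Text("nn1.example.com:8020"));
    ugi.addToken(logical);
    ugi.addToken(new Token.PrivateToken<TokenIdentifier>(physical));
    // Only the logical token is visible here; the private clone is filtered out.
    int visible = ugi.getCredentials().getAllTokens().size();   // expected: 1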

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Wed Oct 16 21:07:28 2013
@@ -45,7 +45,7 @@ import org.apache.hadoop.util.Time;
 
 import com.google.common.base.Preconditions;
 
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "Hive"})
 @InterfaceStability.Evolving
 public abstract 
 class AbstractDelegationTokenSecretManager<TokenIdent 
@@ -289,20 +289,30 @@ extends AbstractDelegationTokenIdentifie
         + tokenRenewInterval, password, getTrackingIdIfEnabled(identifier)));
     return password;
   }
-
-  @Override
-  public synchronized byte[] retrievePassword(TokenIdent identifier)
+  
+  /**
+   * Find the DelegationTokenInformation for the given token id, and verify
+   * that the token is not expired. Note that this method should be called
+   * while holding the secret manager's monitor.
+   */
+  protected DelegationTokenInformation checkToken(TokenIdent identifier)
       throws InvalidToken {
+    assert Thread.holdsLock(this);
     DelegationTokenInformation info = currentTokens.get(identifier);
     if (info == null) {
       throw new InvalidToken("token (" + identifier.toString()
           + ") can't be found in cache");
     }
-    long now = Time.now();
-    if (info.getRenewDate() < now) {
+    if (info.getRenewDate() < Time.now()) {
       throw new InvalidToken("token (" + identifier.toString() + ") is expired");
     }
-    return info.getPassword();
+    return info;
+  }
+  
+  @Override
+  public synchronized byte[] retrievePassword(TokenIdent identifier)
+      throws InvalidToken {
+    return checkToken(identifier).getPassword();
   }
 
   protected String getTrackingIdIfEnabled(TokenIdent ident) {
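
With the expiry check factored out, subclasses (now including Hive's secret
manager, per the widened audience annotation) can reuse it. A hypothetical
override sketch (the method name verifyToken is an assumption): checkToken()
asserts that the caller holds the secret manager's monitor, so the caller stays
synchronized.

    public synchronized void verifyToken(TokenIdent id) throws InvalidToken {
      // checkToken() throws InvalidToken if the token is unknown or expired.
      checkToken(id);
    }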

Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/native/native.sln
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj Wed Oct 16 21:07:28 2013
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf-8"?>
+<?xml version="1.0" encoding="utf-8"?>
 
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more

Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm Wed Oct 16 21:07:28 2013
@@ -854,8 +854,10 @@ KVNO Timestamp         Principal
 | | The container process has the same Unix user as the NodeManager.  |
 *--------------------------------------+--------------------------------------+
 | <<<LinuxContainerExecutor>>>               | |
-| | Supported only on GNU/Linux, this executor runs the containers as the |
-| | user who submitted the application. It requires all user accounts to be |
+| | Supported only on GNU/Linux, this executor runs the containers as either the |
+| | YARN user who submitted the application (when full security is enabled) or |
+| | as a dedicated user (defaults to nobody) when full security is not enabled. |
+| | When full security is enabled, this executor requires all user accounts to be |
 | | created on the cluster nodes where the containers are launched. It uses |
 | | a <setuid> executable that is included in the Hadoop distribution. |
 | | The NodeManager uses this executable to launch and kill containers. |

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java Wed Oct 16 21:07:28 2013
@@ -24,6 +24,8 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URI;
 import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,15 +34,20 @@ import java.util.List;
 import java.util.jar.Attributes;
 import java.util.jar.JarFile;
 import java.util.jar.Manifest;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.tools.tar.TarEntry;
+import org.apache.tools.tar.TarOutputStream;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
+import static org.junit.Assert.*;
 
 public class TestFileUtil {
   private static final Log LOG = LogFactory.getLog(TestFileUtil.class);
@@ -48,14 +55,14 @@ public class TestFileUtil {
   private static final String TEST_ROOT_DIR = System.getProperty(
       "test.build.data", "/tmp") + "/fu";
   private static final File TEST_DIR = new File(TEST_ROOT_DIR);
-  private static String FILE = "x";
-  private static String LINK = "y";
-  private static String DIR = "dir";
-  private File del = new File(TEST_DIR, "del");
-  private File tmp = new File(TEST_DIR, "tmp");
-  private File dir1 = new File(del, DIR + "1");
-  private File dir2 = new File(del, DIR + "2");
-  private File partitioned = new File(TEST_DIR, "partitioned");
+  private static final String FILE = "x";
+  private static final String LINK = "y";
+  private static final String DIR = "dir";
+  private final File del = new File(TEST_DIR, "del");
+  private final File tmp = new File(TEST_DIR, "tmp");
+  private final File dir1 = new File(del, DIR + "1");
+  private final File dir2 = new File(del, DIR + "2");
+  private final File partitioned = new File(TEST_DIR, "partitioned");
 
   /**
    * Creates multiple directories for testing.
@@ -116,17 +123,17 @@ public class TestFileUtil {
    * @param contents String non-null file contents.
    * @throws IOException if an I/O error occurs.
    */
-  private void createFile(File directory, String name, String contents)
+  private File createFile(File directory, String name, String contents)
       throws IOException {
     File newFile = new File(directory, name);
     PrintWriter pw = new PrintWriter(newFile);
-
     try {
       pw.println(contents);
     }
     finally {
       pw.close();
     }
+    return newFile;
   }
 
   @Test (timeout = 30000)
@@ -553,14 +560,283 @@ public class TestFileUtil {
    * @throws IOException
    */
   @Test (timeout = 30000)
-  public void testGetDU() throws IOException {
+  public void testGetDU() throws Exception {
     setupDirs();
 
     long du = FileUtil.getDU(TEST_DIR);
     // Only two files (in partitioned).  Each has 3 characters + system-specific
     // line separator.
-    long expected = 2 * (3 + System.getProperty("line.separator").length());
+    final long expected = 2 * (3 + System.getProperty("line.separator").length());
     Assert.assertEquals(expected, du);
+    
+    // target file does not exist:
+    final File doesNotExist = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
+    long duDoesNotExist = FileUtil.getDU(doesNotExist);
+    assertEquals(0, duDoesNotExist);
+    
+    // target file is not a directory:
+    File notADirectory = new File(partitioned, "part-r-00000");
+    long duNotADirectoryActual = FileUtil.getDU(notADirectory);
+    long duNotADirectoryExpected = 3 + System.getProperty("line.separator").length();
+    assertEquals(duNotADirectoryExpected, duNotADirectoryActual);
+    
+    try {
+      // one of target files is not accessible, but the containing directory
+      // is accessible:
+      try {
+        FileUtil.chmod(notADirectory.getAbsolutePath(), "0000");
+      } catch (InterruptedException ie) {
+        // should never happen since that method never throws InterruptedException.      
+        assertNull(ie);  
+      }
+      assertFalse(notADirectory.canRead());
+      final long du3 = FileUtil.getDU(partitioned);
+      assertEquals(expected, du3);
+
+      // some target files and containing directory are not accessible:
+      try {
+        FileUtil.chmod(partitioned.getAbsolutePath(), "0000");
+      } catch (InterruptedException ie) {
+        // should never happen since that method never throws InterruptedException.      
+        assertNull(ie);  
+      }
+      assertFalse(partitioned.canRead());
+      final long du4 = FileUtil.getDU(partitioned);
+      assertEquals(0, du4);
+    } finally {
+      // Restore the permissions so that we can delete the folder 
+      // in @After method:
+      FileUtil.chmod(partitioned.getAbsolutePath(), "0777", true/*recursive*/);
+    }
+  }
+  
+  @Test (timeout = 30000)
+  public void testUnTar() throws IOException {
+    setupDirs();
+    
+    // make a simple tar:
+    final File simpleTar = new File(del, FILE);
+    OutputStream os = new FileOutputStream(simpleTar); 
+    TarOutputStream tos = new TarOutputStream(os);
+    try {
+      TarEntry te = new TarEntry("foo");
+      byte[] data = "some-content".getBytes("UTF-8");
+      te.setSize(data.length);
+      tos.putNextEntry(te);
+      tos.write(data);
+      tos.closeEntry();
+      tos.flush();
+      tos.finish();
+    } finally {
+      tos.close();
+    }
+
+    // successfully untar it into an existing dir:
+    FileUtil.unTar(simpleTar, tmp);
+    // check result:
+    assertTrue(new File(tmp, "foo").exists());
+    assertEquals(12, new File(tmp, "foo").length());
+    
+    final File regularFile = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
+    regularFile.createNewFile();
+    assertTrue(regularFile.exists());
+    try {
+      FileUtil.unTar(simpleTar, regularFile);
+      assertTrue("An IOException expected.", false);
+    } catch (IOException ioe) {
+      // okay
+    }
+  }
+  
+  @Test (timeout = 30000)
+  public void testReplaceFile() throws IOException {
+    setupDirs();
+    final File srcFile = new File(tmp, "src");
+    
+    // src exists, and target does not exist:
+    srcFile.createNewFile();
+    assertTrue(srcFile.exists());
+    final File targetFile = new File(tmp, "target");
+    assertTrue(!targetFile.exists());
+    FileUtil.replaceFile(srcFile, targetFile);
+    assertTrue(!srcFile.exists());
+    assertTrue(targetFile.exists());
+
+    // src exists and target is a regular file: 
+    srcFile.createNewFile();
+    assertTrue(srcFile.exists());
+    FileUtil.replaceFile(srcFile, targetFile);
+    assertTrue(!srcFile.exists());
+    assertTrue(targetFile.exists());
+    
+    // src exists, and target is a non-empty directory: 
+    srcFile.createNewFile();
+    assertTrue(srcFile.exists());
+    targetFile.delete();
+    targetFile.mkdirs();
+    File obstacle = new File(targetFile, "obstacle");
+    obstacle.createNewFile();
+    assertTrue(obstacle.exists());
+    assertTrue(targetFile.exists() && targetFile.isDirectory());
+    try {
+      FileUtil.replaceFile(srcFile, targetFile);
+      assertTrue(false);
+    } catch (IOException ioe) {
+      // okay
+    }
+    // check up the post-condition: nothing is deleted:
+    assertTrue(srcFile.exists());
+    assertTrue(targetFile.exists() && targetFile.isDirectory());
+    assertTrue(obstacle.exists());
+  }
+  
+  @Test (timeout = 30000)
+  public void testCreateLocalTempFile() throws IOException {
+    setupDirs();
+    final File baseFile = new File(tmp, "base");
+    File tmp1 = FileUtil.createLocalTempFile(baseFile, "foo", false);
+    File tmp2 = FileUtil.createLocalTempFile(baseFile, "foo", true);
+    assertFalse(tmp1.getAbsolutePath().equals(baseFile.getAbsolutePath()));
+    assertFalse(tmp2.getAbsolutePath().equals(baseFile.getAbsolutePath()));
+    assertTrue(tmp1.exists() && tmp2.exists());
+    assertTrue(tmp1.canWrite() && tmp2.canWrite());
+    assertTrue(tmp1.canRead() && tmp2.canRead());
+    tmp1.delete();
+    tmp2.delete();
+    assertTrue(!tmp1.exists() && !tmp2.exists());
+  }
+  
+  @Test (timeout = 30000)
+  public void testUnZip() throws IOException {
+    // make a simple zip
+    setupDirs();
+    
+    // make a simple zip:
+    final File simpleZip = new File(del, FILE);
+    OutputStream os = new FileOutputStream(simpleZip); 
+    ZipOutputStream tos = new ZipOutputStream(os);
+    try {
+      ZipEntry ze = new ZipEntry("foo");
+      byte[] data = "some-content".getBytes("UTF-8");
+      ze.setSize(data.length);
+      tos.putNextEntry(ze);
+      tos.write(data);
+      tos.closeEntry();
+      tos.flush();
+      tos.finish();
+    } finally {
+      tos.close();
+    }
+    
+    // successfully unzip it into an existing dir:
+    FileUtil.unZip(simpleZip, tmp);
+    // check result:
+    assertTrue(new File(tmp, "foo").exists());
+    assertEquals(12, new File(tmp, "foo").length());
+    
+    final File regularFile = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog");
+    regularFile.createNewFile();
+    assertTrue(regularFile.exists());
+    try {
+      FileUtil.unZip(simpleZip, regularFile);
+      assertTrue("An IOException expected.", false);
+    } catch (IOException ioe) {
+      // okay
+    }
+  }  
+  
+  @Test (timeout = 30000)
+  /*
+   * Test method copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf)
+   */
+  public void testCopy5() throws IOException {
+    setupDirs();
+    
+    URI uri = tmp.toURI();
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.newInstance(uri, conf);
+    final String content = "some-content";
+    File srcFile = createFile(tmp, "src", content);
+    Path srcPath = new Path(srcFile.toURI());
+    
+    // copy regular file:
+    final File dest = new File(del, "dest");
+    boolean result = FileUtil.copy(fs, srcPath, dest, false, conf);
+    assertTrue(result);
+    assertTrue(dest.exists());
+    assertEquals(content.getBytes().length 
+        + System.getProperty("line.separator").getBytes().length, dest.length());
+    assertTrue(srcFile.exists()); // should not be deleted
+    
+    // copy regular file, delete src:
+    dest.delete();
+    assertTrue(!dest.exists());
+    result = FileUtil.copy(fs, srcPath, dest, true, conf);
+    assertTrue(result);
+    assertTrue(dest.exists());
+    assertEquals(content.getBytes().length 
+        + System.getProperty("line.separator").getBytes().length, dest.length());
+    assertTrue(!srcFile.exists()); // should be deleted
+    
+    // copy a dir:
+    dest.delete();
+    assertTrue(!dest.exists());
+    srcPath = new Path(partitioned.toURI());
+    result = FileUtil.copy(fs, srcPath, dest, true, conf);
+    assertTrue(result);
+    assertTrue(dest.exists() && dest.isDirectory());
+    File[] files = dest.listFiles();
+    assertTrue(files != null);
+    assertEquals(2, files.length);
+    for (File f: files) {
+      assertEquals(3 
+          + System.getProperty("line.separator").getBytes().length, f.length());
+    }
+    assertTrue(!partitioned.exists()); // should be deleted
+  }  
+
+  @Test (timeout = 30000)
+  public void testStat2Paths1() {
+    assertNull(FileUtil.stat2Paths(null));
+    
+    FileStatus[] fileStatuses = new FileStatus[0]; 
+    Path[] paths = FileUtil.stat2Paths(fileStatuses);
+    assertEquals(0, paths.length);
+    
+    Path path1 = new Path("file://foo");
+    Path path2 = new Path("file://moo");
+    fileStatuses = new FileStatus[] { 
+        new FileStatus(3, false, 0, 0, 0, path1), 
+        new FileStatus(3, false, 0, 0, 0, path2) 
+        };
+    paths = FileUtil.stat2Paths(fileStatuses);
+    assertEquals(2, paths.length);
+    assertEquals(paths[0], path1);
+    assertEquals(paths[1], path2);
+  }
+  
+  @Test (timeout = 30000)
+  public void testStat2Paths2()  {
+    Path defaultPath = new Path("file://default");
+    Path[] paths = FileUtil.stat2Paths(null, defaultPath);
+    assertEquals(1, paths.length);
+    assertEquals(defaultPath, paths[0]);
+
+    paths = FileUtil.stat2Paths(null, null);
+    assertTrue(paths != null);
+    assertEquals(1, paths.length);
+    assertEquals(null, paths[0]);
+    
+    Path path1 = new Path("file://foo");
+    Path path2 = new Path("file://moo");
+    FileStatus[] fileStatuses = new FileStatus[] { 
+        new FileStatus(3, false, 0, 0, 0, path1), 
+        new FileStatus(3, false, 0, 0, 0, path2) 
+        };
+    paths = FileUtil.stat2Paths(fileStatuses, defaultPath);
+    assertEquals(2, paths.length);
+    assertEquals(paths[0], path1);
+    assertEquals(paths[1], path2);
   }
 
   @Test (timeout = 30000)

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java Wed Oct 16 21:07:28 2013
@@ -19,7 +19,9 @@
 package org.apache.hadoop.fs;
 
 import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
 
+import java.io.File;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -106,7 +108,7 @@ public class TestFsShellCopy {  
     Path targetDir = new Path(testRoot, "target");    
     Path filePath = new Path(testRoot, new Path("srcFile"));
     lfs.create(filePath).close();
-    checkPut(filePath, targetDir);
+    checkPut(filePath, targetDir, false);
   }
 
   @Test
@@ -119,10 +121,42 @@ public class TestFsShellCopy {  
     Path dirPath = new Path(testRoot, new Path("srcDir"));
     lfs.mkdirs(dirPath);
     lfs.create(new Path(dirPath, "srcFile")).close();
-    checkPut(dirPath, targetDir);
+    checkPut(dirPath, targetDir, false);
   }
+
+  @Test
+  public void testCopyFileFromWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
+        .toString())).getAbsolutePath();
+    Path testRoot = new Path(windowsTestRootPath, "testPutFile");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path targetDir = new Path(testRoot, "target");
+    Path filePath = new Path(testRoot, new Path("srcFile"));
+    lfs.create(filePath).close();
+    checkPut(filePath, targetDir, true);
+  }
+
+  @Test
+  public void testCopyDirFromWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
+        .toString())).getAbsolutePath();
+    Path testRoot = new Path(windowsTestRootPath, "testPutDir");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path targetDir = new Path(testRoot, "target");
+    Path dirPath = new Path(testRoot, new Path("srcDir"));
+    lfs.mkdirs(dirPath);
+    lfs.create(new Path(dirPath, "srcFile")).close();
+    checkPut(dirPath, targetDir, true);
+  }
+
   
-  private void checkPut(Path srcPath, Path targetDir)
+  private void checkPut(Path srcPath, Path targetDir, boolean useWindowsPath)
   throws Exception {
     lfs.delete(targetDir, true);
     lfs.mkdirs(targetDir);    
@@ -134,37 +168,37 @@ public class TestFsShellCopy {  
     
     // copy to new file, then again
     prepPut(dstPath, false, false);
-    checkPut(0, srcPath, dstPath);
+    checkPut(0, srcPath, dstPath, useWindowsPath);
     if (lfs.isFile(srcPath)) {
-      checkPut(1, srcPath, dstPath);
+      checkPut(1, srcPath, dstPath, useWindowsPath);
     } else { // directory works because it copies into the dir
       // clear contents so the check won't think there are extra paths
       prepPut(dstPath, true, true);
-      checkPut(0, srcPath, dstPath);
+      checkPut(0, srcPath, dstPath, useWindowsPath);
     }
 
     // copy to non-existent subdir
     prepPut(childPath, false, false);
-    checkPut(1, srcPath, dstPath);
+    checkPut(1, srcPath, dstPath, useWindowsPath);
 
     // copy into dir, then with another name
     prepPut(dstPath, true, true);
-    checkPut(0, srcPath, dstPath);
+    checkPut(0, srcPath, dstPath, useWindowsPath);
     prepPut(childPath, true, true);
-    checkPut(0, srcPath, childPath);
+    checkPut(0, srcPath, childPath, useWindowsPath);
 
     // try to put to pwd with existing dir
     prepPut(targetDir, true, true);
-    checkPut(0, srcPath, null);
+    checkPut(0, srcPath, null, useWindowsPath);
     prepPut(targetDir, true, true);
-    checkPut(0, srcPath, new Path("."));
+    checkPut(0, srcPath, new Path("."), useWindowsPath);
 
     // try to put to pwd with non-existent cwd
     prepPut(dstPath, false, true);
     lfs.setWorkingDirectory(dstPath);
-    checkPut(1, srcPath, null);
+    checkPut(1, srcPath, null, useWindowsPath);
     prepPut(dstPath, false, true);
-    checkPut(1, srcPath, new Path("."));
+    checkPut(1, srcPath, new Path("."), useWindowsPath);
   }
 
   private void prepPut(Path dst, boolean create,
@@ -183,12 +217,17 @@ public class TestFsShellCopy {  
     }
   }
   
-  private void checkPut(int exitCode, Path src, Path dest) throws Exception {
+  private void checkPut(int exitCode, Path src, Path dest,
+      boolean useWindowsPath) throws Exception {
     String argv[] = null;
+    String srcPath = src.toString();
+    if (useWindowsPath) {
+      srcPath = (new File(srcPath)).getAbsolutePath();
+    }
     if (dest != null) {
-      argv = new String[]{ "-put", src.toString(), pathAsString(dest) };
+      argv = new String[]{ "-put", srcPath, pathAsString(dest) };
     } else {
-      argv = new String[]{ "-put", src.toString() };
+      argv = new String[]{ "-put", srcPath };
       dest = new Path(Path.CUR_DIR);
     }
     
@@ -418,6 +457,34 @@ public class TestFsShellCopy {  
     assertTrue(lfs.exists(srcDir));
   }
   
+  @Test
+  public void testMoveFromWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    Path testRoot = new Path(testRootDir, "testPutFile");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path target = new Path(testRoot, "target");
+    Path srcFile = new Path(testRoot, new Path("srcFile"));
+    lfs.createNewFile(srcFile);
+
+    String winSrcFile = (new File(srcFile.toUri().getPath()
+        .toString())).getAbsolutePath();
+    shellRun(0, "-moveFromLocal", winSrcFile, target.toString());
+    assertFalse(lfs.exists(srcFile));
+    assertTrue(lfs.exists(target));
+    assertTrue(lfs.isFile(target));
+  }
+
+  @Test
+  public void testGetWindowsLocalPath() throws Exception {
+    assumeTrue(Path.WINDOWS);
+    String winDstFile = (new File(dstPath.toUri().getPath()
+        .toString())).getAbsolutePath();
+    shellRun(0, "-get", srcPath.toString(), winDstFile);
+    checkPath(dstPath, false);
+  }
+  
   private void createFile(Path ... paths) throws IOException {
     for (Path path : paths) {
       FSDataOutputStream out = lfs.create(path);

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java Wed Oct 16 21:07:28 2013
@@ -18,14 +18,155 @@
 
 package org.apache.hadoop.fs;
 
-import java.io.IOException;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Progressable;
 import org.junit.Assert;
-import static org.junit.Assert.*;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.EnumSet;
+import java.util.Iterator;
+
+import static org.apache.hadoop.fs.Options.ChecksumOpt;
+import static org.apache.hadoop.fs.Options.CreateOpts;
+import static org.apache.hadoop.fs.Options.Rename;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+@SuppressWarnings("deprecation")
 public class TestHarFileSystem {
+  public static final Log LOG = LogFactory.getLog(TestHarFileSystem.class);
+
+  /**
+   * FileSystem methods that must not be overridden by
+   * {@link HarFileSystem}, either because a default implementation is
+   * already available or because the method is not relevant.
+   */
+  @SuppressWarnings("deprecation")
+  private interface MustNotImplement {
+    public BlockLocation[] getFileBlockLocations(Path p, long start, long len);
+    public long getLength(Path f);
+    public FSDataOutputStream append(Path f, int bufferSize);
+    public void rename(Path src, Path dst, Rename... options);
+    public boolean exists(Path f);
+    public boolean isDirectory(Path f);
+    public boolean isFile(Path f);
+    public boolean createNewFile(Path f);
+
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+        boolean overwrite, int bufferSize, short replication, long blockSize,
+        Progressable progress) throws IOException;
+
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
+        Progressable progress) throws IOException;
+
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
+        Progressable progress, ChecksumOpt checksumOpt);
+
+    public boolean mkdirs(Path f);
+    public FSDataInputStream open(Path f);
+    public FSDataOutputStream create(Path f);
+    public FSDataOutputStream create(Path f, boolean overwrite);
+    public FSDataOutputStream create(Path f, Progressable progress);
+    public FSDataOutputStream create(Path f, short replication);
+    public FSDataOutputStream create(Path f, short replication,
+        Progressable progress);
+
+    public FSDataOutputStream create(Path f, boolean overwrite,
+        int bufferSize);
+
+    public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
+        Progressable progress);
+
+    public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
+        short replication, long blockSize);
+
+    public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
+        short replication, long blockSize, Progressable progress);
+
+    public FSDataOutputStream create(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication,
+        long blockSize, Progressable progress) throws IOException;
+
+    public FSDataOutputStream create(Path f, FsPermission permission,
+        EnumSet<CreateFlag> flags, int bufferSize, short replication,
+        long blockSize, Progressable progress, ChecksumOpt checksumOpt)
+        throws IOException;
+
+    public String getName();
+    public boolean delete(Path f);
+    public short getReplication(Path src);
+    public void processDeleteOnExit();
+    public ContentSummary getContentSummary(Path f);
+    public FsStatus getStatus();
+    public FileStatus[] listStatus(Path f, PathFilter filter);
+    public FileStatus[] listStatus(Path[] files);
+    public FileStatus[] listStatus(Path[] files, PathFilter filter);
+    public FileStatus[] globStatus(Path pathPattern);
+    public FileStatus[] globStatus(Path pathPattern, PathFilter filter);
+
+    public Iterator<LocatedFileStatus> listFiles(Path path,
+        boolean isRecursive);
+
+    public Iterator<LocatedFileStatus> listLocatedStatus(Path f);
+    public Iterator<LocatedFileStatus> listLocatedStatus(Path f,
+        PathFilter filter);
+    public void copyFromLocalFile(Path src, Path dst);
+    public void moveFromLocalFile(Path[] srcs, Path dst);
+    public void moveFromLocalFile(Path src, Path dst);
+    public void copyToLocalFile(Path src, Path dst);
+    public void copyToLocalFile(boolean delSrc, Path src, Path dst,
+        boolean useRawLocalFileSystem);
+    public void moveToLocalFile(Path src, Path dst);
+    public long getBlockSize(Path f);
+    public FSDataOutputStream primitiveCreate(Path f,
+        EnumSet<CreateFlag> createFlag, CreateOpts... opts);
+    public void primitiveMkdir(Path f, FsPermission absolutePermission,
+        boolean createParent);
+    public int getDefaultPort();
+    public String getCanonicalServiceName();
+    public Token<?> getDelegationToken(String renewer) throws IOException;
+    public boolean deleteOnExit(Path f) throws IOException;
+    public boolean cancelDeleteOnExit(Path f) throws IOException;
+    public Token<?>[] addDelegationTokens(String renewer, Credentials creds)
+        throws IOException;
+    public Path fixRelativePart(Path p);
+    public void concat(Path trg, Path [] psrcs) throws IOException;
+    public FSDataOutputStream primitiveCreate(Path f,
+        FsPermission absolutePermission, EnumSet<CreateFlag> flag, int bufferSize,
+        short replication, long blockSize, Progressable progress,
+        ChecksumOpt checksumOpt) throws IOException;
+    public boolean primitiveMkdir(Path f, FsPermission absolutePermission)
+        throws IOException;
+    public RemoteIterator<Path> listCorruptFileBlocks(Path path)
+        throws IOException;
+    public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
+        throws IOException;
+    public void createSymlink(Path target, Path link, boolean createParent)
+        throws IOException;
+    public FileStatus getFileLinkStatus(Path f) throws IOException;
+    public boolean supportsSymlinks();
+    public Path getLinkTarget(Path f) throws IOException;
+    public Path resolveLink(Path f) throws IOException;
+    public void setVerifyChecksum(boolean verifyChecksum);
+    public void setWriteChecksum(boolean writeChecksum);
+    public Path createSnapshot(Path path, String snapshotName) throws
+        IOException;
+    public void renameSnapshot(Path path, String snapshotOldName,
+        String snapshotNewName) throws IOException;
+    public void deleteSnapshot(Path path, String snapshotName)
+        throws IOException;
+  }
+
   @Test
   public void testHarUri() {
     final Configuration conf = new Configuration();
@@ -44,8 +185,7 @@ public class TestHarFileSystem {
       p.getFileSystem(conf);
       Assert.fail(p + " is an invalid path.");
     } catch (IOException e) {
-      System.out.println("GOOD: Got an exception.");
-      e.printStackTrace(System.out);
+      // Expected
     }
   }
 
@@ -133,6 +273,37 @@ public class TestHarFileSystem {
       assertEquals(b[1].getOffset(), 128);
       assertEquals(b[1].getLength(), 384);
     }
+  }
 
+  @Test
+  public void testInheritedMethodsImplemented() throws Exception {
+    int errors = 0;
+    for (Method m : FileSystem.class.getDeclaredMethods()) {
+      if (Modifier.isStatic(m.getModifiers()) ||
+          Modifier.isPrivate(m.getModifiers()) ||
+          Modifier.isFinal(m.getModifiers())) {
+        continue;
+      }
+
+      try {
+        MustNotImplement.class.getMethod(m.getName(), m.getParameterTypes());
+        try {
+          HarFileSystem.class.getDeclaredMethod(m.getName(), m.getParameterTypes());
+          LOG.error("HarFileSystem MUST not implement " + m);
+          errors++;
+        } catch (NoSuchMethodException ex) {
+          // Expected
+        }
+      } catch (NoSuchMethodException exc) {
+        try {
+          HarFileSystem.class.getDeclaredMethod(m.getName(), m.getParameterTypes());
+        } catch (NoSuchMethodException exc2) {
+          LOG.error("HarFileSystem MUST implement " + m);
+          errors++;
+        }
+      }
+    }
+    assertTrue((errors + " methods were not overridden correctly - see log"),
+        errors <= 0);
   }
 }

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java Wed Oct 16 21:07:28 2013
@@ -18,14 +18,6 @@
 
 package org.apache.hadoop.fs;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URI;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Shell;
@@ -34,6 +26,14 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.junit.Assert.*;
+
 /**
  * This test class checks basic operations with {@link HarFileSystem} including
  * various initialization cases, getters, and modification methods.
@@ -69,7 +69,7 @@ public class TestHarFileSystemBasics {
   /*
    * creates and returns fully initialized HarFileSystem
    */
-  private HarFileSystem createHarFileSysten(final Configuration conf)
+  private HarFileSystem createHarFileSystem(final Configuration conf)
       throws Exception {
     localFileSystem = FileSystem.getLocal(conf);
     localFileSystem.initialize(new URI("file:///"), conf);
@@ -130,7 +130,7 @@ public class TestHarFileSystemBasics {
     }
     // create Har to test:
     conf = new Configuration();
-    harFileSystem = createHarFileSysten(conf);
+    harFileSystem = createHarFileSystem(conf);
   }
 
   @After
@@ -232,6 +232,32 @@ public class TestHarFileSystemBasics {
     assertTrue(p2.toUri().toString().startsWith("har://file-localhost/"));
   }
 
+  @Test
+  public void testListLocatedStatus() throws Exception {
+    String testHarPath = this.getClass().getResource("/test.har").getPath();
+    URI uri = new URI("har://" + testHarPath);
+    HarFileSystem hfs = new HarFileSystem(localFileSystem);
+    hfs.initialize(uri, new Configuration());
+
+    // test.har has the following contents:
+    //   dir1/1.txt
+    //   dir1/2.txt
+    Set<String> expectedFileNames = new HashSet<String>();
+    expectedFileNames.add("1.txt");
+    expectedFileNames.add("2.txt");
+
+    // List contents of dir, and ensure we find all expected files
+    Path path = new Path("dir1");
+    RemoteIterator<LocatedFileStatus> fileList = hfs.listLocatedStatus(path);
+    while (fileList.hasNext()) {
+      String fileName = fileList.next().getPath().getName();
+      assertTrue(fileName + " not in expected files list", expectedFileNames.contains(fileName));
+      expectedFileNames.remove(fileName);
+    }
+    assertEquals("Didn't find all of the expected file names: " + expectedFileNames,
+                 0, expectedFileNames.size());
+  }
+
   // ========== Negative:
 
   @Test

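The new testListLocatedStatus() iterates a Hadoop RemoteIterator rather than a java.util.Iterator because hasNext()/next() may throw IOException. A minimal sketch of the same loop against any FileSystem (the directory path is supplied by the caller; illustration only, not part of this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocatedFileStatus;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.RemoteIterator;

    public class ListLocatedStatusSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        // hasNext()/next() declare IOException, unlike java.util.Iterator
        RemoteIterator<LocatedFileStatus> it =
            fs.listLocatedStatus(new Path(args[0]));
        while (it.hasNext()) {
          LocatedFileStatus status = it.next();
          System.out.println(status.getPath() + " len=" + status.getLen());
        }
      }
    }
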
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java Wed Oct 16 21:07:28 2013
@@ -54,7 +54,7 @@ public class TestSSLHttpServer extends H
 
   @Before
   public void setup() throws Exception {
-    HttpConfig.setSecure(true);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
     File base = new File(BASEDIR);
     FileUtil.fullyDelete(base);
     base.mkdirs();
@@ -89,7 +89,7 @@ public class TestSSLHttpServer extends H
     String classpathDir =
         KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
     new File(classpathDir, CONFIG_SITE_XML).delete();
-    HttpConfig.setSecure(false);
+    HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
   }
   
 

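The setup()/cleanup() changes above flip the process-wide HttpConfig policy for the duration of the test. Since setPolicy() mutates shared static state, other tests doing the same may want a try/finally guard so the policy is restored even when the body throws. A sketch of that guard, assuming org.apache.hadoop.http.HttpConfig is imported; the snapshot-via-isSecure() shape is an assumption, not code from this commit:

    HttpConfig.Policy previous = HttpConfig.isSecure()
        ? HttpConfig.Policy.HTTPS_ONLY : HttpConfig.Policy.HTTP_ONLY;
    HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
    try {
      // ... exercise HTTPS-only behaviour ...
    } finally {
      HttpConfig.setPolicy(previous);   // always restore the shared static policy
    }
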
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java Wed Oct 16 21:07:28 2013
@@ -19,18 +19,23 @@
 package org.apache.hadoop.io;
 
 import java.io.*;
+
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.*;
-
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.conf.*;
 
 /** Support for flat files of binary key/value pairs. */
 public class TestArrayFile extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestArrayFile.class);
-  private static String FILE =
-    System.getProperty("test.build.data",".") + "/test.array";
+  
+  private static final Path TEST_DIR = new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      TestArrayFile.class.getSimpleName());
+  private static String TEST_FILE = new Path(TEST_DIR, "test.array").toString();
 
   public TestArrayFile(String name) { 
     super(name); 
@@ -40,15 +45,15 @@ public class TestArrayFile extends TestC
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
     RandomDatum[] data = generate(10000);
-    writeTest(fs, data, FILE);
-    readTest(fs, data, FILE, conf);
+    writeTest(fs, data, TEST_FILE);
+    readTest(fs, data, TEST_FILE, conf);
   }
 
   public void testEmptyFile() throws Exception {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
-    writeTest(fs, new RandomDatum[0], FILE);
-    ArrayFile.Reader reader = new ArrayFile.Reader(fs, FILE, conf);
+    writeTest(fs, new RandomDatum[0], TEST_FILE);
+    ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
     assertNull(reader.get(0, new RandomDatum()));
     reader.close();
   }
@@ -87,31 +92,75 @@ public class TestArrayFile extends TestC
       LOG.debug("reading " + data.length + " debug");
     }
     ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
-    for (int i = 0; i < data.length; i++) {       // try forwards
-      reader.get(i, v);
-      if (!v.equals(data[i])) {
-        throw new RuntimeException("wrong value at " + i);
+    try {
+      for (int i = 0; i < data.length; i++) {       // try forwards
+        reader.get(i, v);
+        if (!v.equals(data[i])) {
+          throw new RuntimeException("wrong value at " + i);
+        }
       }
-    }
-    for (int i = data.length-1; i >= 0; i--) {    // then backwards
-      reader.get(i, v);
-      if (!v.equals(data[i])) {
-        throw new RuntimeException("wrong value at " + i);
+      for (int i = data.length-1; i >= 0; i--) {    // then backwards
+        reader.get(i, v);
+        if (!v.equals(data[i])) {
+          throw new RuntimeException("wrong value at " + i);
+        }
       }
-    }
-    reader.close();
-    if(LOG.isDebugEnabled()) {
-      LOG.debug("done reading " + data.length + " debug");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("done reading " + data.length + " debug");
+      }
+    } finally {
+      reader.close();
     }
   }
 
-
+  /**
+   * Test {@link ArrayFile.Reader} iteration methods:
+   * <pre>
+   * {@code next()} and {@code seek()}, in and out of range.
+   * </pre>
+   */
+  public void testArrayFileIteration() {
+    int SIZE = 10;
+    Configuration conf = new Configuration();    
+    try {
+      FileSystem fs = FileSystem.get(conf);
+      ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE, 
+          LongWritable.class, CompressionType.RECORD, defaultProgressable);
+      assertNotNull("testArrayFileIteration error !!!", writer);
+      
+      for (int i = 0; i < SIZE; i++)
+        writer.append(new LongWritable(i));
+      
+      writer.close();
+      
+      ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
+      LongWritable nextWritable = new LongWritable(0);
+      
+      for (int i = 0; i < SIZE; i++) {
+        nextWritable = (LongWritable)reader.next(nextWritable);
+        assertEquals(nextWritable.get(), i);
+      }
+        
+      assertTrue("testArrayFileIteration seek error !!!",
+          reader.seek(new LongWritable(6)));
+      nextWritable = (LongWritable) reader.next(nextWritable);
+      assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
+      assertTrue("testArrayFileIteration error !!!",
+          nextWritable.equals(new LongWritable(7)));
+      assertFalse("testArrayFileIteration error !!!",
+          reader.seek(new LongWritable(SIZE + 5)));
+      reader.close();
+    } catch (Exception ex) {
+      fail("testArrayFileWriterConstruction error !!!");
+    }
+  }
+ 
   /** For debugging and testing. */
   public static void main(String[] args) throws Exception {
     int count = 1024 * 1024;
     boolean create = true;
     boolean check = true;
-    String file = FILE;
+    String file = TEST_FILE;
     String usage = "Usage: TestArrayFile [-count N] [-nocreate] [-nocheck] file";
       
     if (args.length == 0) {
@@ -160,4 +209,11 @@ public class TestArrayFile extends TestC
       fs.close();
     }
   }
+  
+  private static final Progressable defaultProgressable = new Progressable() {
+    @Override
+    public void progress() {      
+    }
+  };
+  
 }

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java Wed Oct 16 21:07:28 2013
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,6 +20,8 @@ package org.apache.hadoop.io;
 
 import java.io.*;
 
+import org.junit.Assert;
+
 import junit.framework.TestCase;
 
 /** Unit tests for ArrayWritable */
@@ -61,4 +63,50 @@ public class TestArrayWritable extends T
       assertEquals(destElements[i],elements[i]);
     }
   }
+  
+  /**
+   * Test {@link ArrayWritable} toArray() method.
+   */
+  public void testArrayWritableToArray() {
+    Text[] elements = {new Text("zero"), new Text("one"), new Text("two")};
+    TextArrayWritable arrayWritable = new TextArrayWritable();
+    arrayWritable.set(elements);
+    Object array = arrayWritable.toArray();
+  
+    assertTrue("TestArrayWritable testArrayWritableToArray error!!! ", array instanceof Text[]);
+    Text[] destElements = (Text[]) array;
+  
+    for (int i = 0; i < elements.length; i++) {
+      assertEquals(destElements[i], elements[i]);
+    }
+  }
+  
+  /**
+   * test {@link ArrayWritable} constructor with null
+   */
+  public void testNullArgument() {
+    try {
+      Class<? extends Writable> valueClass = null;
+      new ArrayWritable(valueClass);
+      fail("testNullArgument error !!!");
+    } catch (IllegalArgumentException exp) {
+      // expected: a null valueClass is rejected with IllegalArgumentException
+    } catch (Exception e) {
+      fail("testNullArgument error !!!");
+    }
+  }
+
+  /**
+   * test {@link ArrayWritable} constructor with {@code String[]} as a parameter
+   */
+  @SuppressWarnings("deprecation")
+  public void testArrayWritableStringConstructor() {
+    String[] original = { "test1", "test2", "test3" };
+    ArrayWritable arrayWritable = new ArrayWritable(original);
+    assertEquals("testArrayWritableStringConstructor class error!!!", 
+        UTF8.class, arrayWritable.getValueClass());
+    Assert.assertArrayEquals("testArrayWritableStringConstructor toString error!!!",
+      original, arrayWritable.toStrings());
+  }
+  
 }

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java Wed Oct 16 21:07:28 2013
@@ -18,28 +18,53 @@
 
 package org.apache.hadoop.io;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-
-import junit.framework.TestCase;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.Progressable;
+import org.junit.Assert;
 
 public class TestBloomMapFile extends TestCase {
   private static Configuration conf = new Configuration();
+  private static final Path TEST_ROOT = new Path(
+      System.getProperty("test.build.data", "/tmp"),
+      TestBloomMapFile.class.getSimpleName());
+  private static final Path TEST_DIR = new Path(TEST_ROOT, "testfile");
+  private static final Path TEST_FILE = new Path(TEST_ROOT, "testfile");
+
+  @Override
+  public void setUp() throws Exception {
+    LocalFileSystem fs = FileSystem.getLocal(conf);
+    if (fs.exists(TEST_ROOT) && !fs.delete(TEST_ROOT, true)) {
+      Assert.fail("Can't clean up test root dir");
+    }
+    fs.mkdirs(TEST_ROOT);
+  }
   
+  @SuppressWarnings("deprecation")
   public void testMembershipTest() throws Exception {
     // write the file
-    Path dirName = new Path(System.getProperty("test.build.data",".") +
-        getName() + ".bloommapfile"); 
     FileSystem fs = FileSystem.getLocal(conf);
-    Path qualifiedDirName = fs.makeQualified(dirName);
+    Path qualifiedDirName = fs.makeQualified(TEST_DIR);
     conf.setInt("io.mapfile.bloom.size", 2048);
     BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), IntWritable.class, Text.class);
+        qualifiedDirName.toString(), IntWritable.class, Text.class);
     IntWritable key = new IntWritable();
     Text value = new Text();
     for (int i = 0; i < 2000; i += 2) {
@@ -48,7 +73,7 @@ public class TestBloomMapFile extends Te
       writer.append(key, value);
     }
     writer.close();
-    
+
     BloomMapFile.Reader reader = new BloomMapFile.Reader(fs,
         qualifiedDirName.toString(), conf);
     // check false positives rate
@@ -58,9 +83,11 @@ public class TestBloomMapFile extends Te
       key.set(i);
       boolean exists = reader.probablyHasKey(key);
       if (i % 2 == 0) {
-        if (!exists) falseNeg++;
+        if (!exists)
+          falseNeg++;
       } else {
-        if (exists) falsePos++;
+        if (exists)
+          falsePos++;
       }
     }
     reader.close();
@@ -71,13 +98,13 @@ public class TestBloomMapFile extends Te
     assertTrue(falsePos < 2);
   }
 
-  private void checkMembershipVaryingSizedKeys(String name, List<Text> keys) throws Exception {
-    Path dirName = new Path(System.getProperty("test.build.data",".") +
-        name + ".bloommapfile"); 
+  @SuppressWarnings("deprecation")
+  private void checkMembershipVaryingSizedKeys(String name, List<Text> keys)
+      throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
-    Path qualifiedDirName = fs.makeQualified(dirName);
+    Path qualifiedDirName = fs.makeQualified(TEST_DIR);
     BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
-      qualifiedDirName.toString(), Text.class, NullWritable.class);
+        qualifiedDirName.toString(), Text.class, NullWritable.class);
     for (Text key : keys) {
       writer.append(key, NullWritable.get());
     }
@@ -88,7 +115,8 @@ public class TestBloomMapFile extends Te
         qualifiedDirName.toString(), conf);
     Collections.reverse(keys);
     for (Text key : keys) {
-      assertTrue("False negative for existing key " + key, reader.probablyHasKey(key));
+      assertTrue("False negative for existing key " + key,
+          reader.probablyHasKey(key));
     }
     reader.close();
     fs.delete(qualifiedDirName, true);
@@ -108,4 +136,171 @@ public class TestBloomMapFile extends Te
     checkMembershipVaryingSizedKeys(getName(), list);
   }
 
+  /**
+   * test {@code BloomMapFile.delete()} method
+   */
+  public void testDeleteFile() {
+    try {
+      FileSystem fs = FileSystem.getLocal(conf);
+      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(Text.class));
+      assertNotNull("testDeleteFile error !!!", writer);
+      BloomMapFile.delete(fs, "." + TEST_FILE);
+    } catch (Exception ex) {
+      fail("unexpect ex in testDeleteFile !!!");
+    }
+  }
+  
+  /**
+   * Test the {@link BloomMapFile.Reader} constructor when the underlying
+   * path throws an IOException
+   */
+  public void testIOExceptionInWriterConstructor() {
+    Path dirNameSpy = org.mockito.Mockito.spy(TEST_FILE);
+    try {
+      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(Text.class));
+      writer.append(new IntWritable(1), new Text("123124142"));
+      writer.close();
+
+      org.mockito.Mockito.when(dirNameSpy.getFileSystem(conf)).thenThrow(
+          new IOException());
+      BloomMapFile.Reader reader = new BloomMapFile.Reader(dirNameSpy, conf,
+          MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
+
+      assertNull("testIOExceptionInWriterConstructor error !!!",
+          reader.getBloomFilter());
+      reader.close();
+    } catch (Exception ex) {
+      fail("unexpect ex in testIOExceptionInWriterConstructor !!!");
+    }
+  }
+
+  /**
+   * Test the {@link BloomMapFile.Reader#get} method
+   */
+  public void testGetBloomMapFile() {
+    int SIZE = 10;
+    try {
+      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+          MapFile.Writer.keyClass(IntWritable.class),
+          MapFile.Writer.valueClass(Text.class));
+
+      for (int i = 0; i < SIZE; i++) {
+        writer.append(new IntWritable(i), new Text());
+      }
+      writer.close();
+
+      BloomMapFile.Reader reader = new BloomMapFile.Reader(TEST_FILE, conf,
+          MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
+
+      for (int i = 0; i < SIZE; i++) {
+        assertNotNull("testGetBloomMapFile error !!!",
+            reader.get(new IntWritable(i), new Text()));
+      }
+            
+      assertNull("testGetBloomMapFile error !!!",
+          reader.get(new IntWritable(SIZE + 5), new Text()));
+      reader.close();
+    } catch (Exception ex) {
+      fail("unexpect ex in testGetBloomMapFile !!!");
+    }
+  }
+
+  /**
+   * test {@code BloomMapFile.Writer} constructors
+   */
+  @SuppressWarnings("deprecation")
+  public void testBloomMapFileConstructors() {
+    try {
+      FileSystem ts = FileSystem.get(conf);
+      String testFileName = TEST_FILE.toString();
+      BloomMapFile.Writer writer1 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
+          defaultCodec, defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer1);
+      BloomMapFile.Writer writer2 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
+          defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer2);
+      BloomMapFile.Writer writer3 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.BLOCK);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer3);
+      BloomMapFile.Writer writer4 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
+          defaultCodec, defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer4);
+      BloomMapFile.Writer writer5 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
+          defaultProgress);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer5);
+      BloomMapFile.Writer writer6 = new BloomMapFile.Writer(conf, ts,
+          testFileName, IntWritable.class, Text.class, CompressionType.RECORD);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer6);
+      BloomMapFile.Writer writer7 = new BloomMapFile.Writer(conf, ts,
+          testFileName, WritableComparator.get(Text.class), Text.class);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer7);
+    } catch (Exception ex) {
+      fail("testBloomMapFileConstructors error !!!");
+    }
+  }
+
+  static final Progressable defaultProgress = new Progressable() {
+    @Override
+    public void progress() {
+    }
+  };
+
+  static final CompressionCodec defaultCodec = new CompressionCodec() {
+    @Override
+    public String getDefaultExtension() {
+      return null;
+    }
+
+    @Override
+    public Class<? extends Decompressor> getDecompressorType() {
+      return null;
+    }
+
+    @Override
+    public Class<? extends Compressor> getCompressorType() {
+      return null;
+    }
+
+    @Override
+    public CompressionOutputStream createOutputStream(OutputStream out,
+        Compressor compressor) throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionOutputStream createOutputStream(OutputStream out)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionInputStream createInputStream(InputStream in,
+        Decompressor decompressor) throws IOException {
+      return null;
+    }
+
+    @Override
+    public CompressionInputStream createInputStream(InputStream in)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public Decompressor createDecompressor() {
+      return null;
+    }
+
+    @Override
+    public Compressor createCompressor() {
+      return null;
+    }
+  };
 }
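
The new tests above favour the Options-style constructors (MapFile.Writer.keyClass()/valueClass(), MapFile.Reader.comparator()) over the deprecated positional forms that testBloomMapFileConstructors() still exercises. A rough end-to-end sketch of that Options API; the output path is illustrative and the snippet is not part of this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.BloomMapFile;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public class BloomMapFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path file = new Path("/tmp/bloom-sketch");   // illustrative location
        BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, file,
            MapFile.Writer.keyClass(IntWritable.class),
            MapFile.Writer.valueClass(Text.class));
        for (int i = 0; i < 10; i++) {
          writer.append(new IntWritable(i), new Text("value-" + i));  // keys in sorted order
        }
        writer.close();

        BloomMapFile.Reader reader = new BloomMapFile.Reader(file, conf);
        try {
          System.out.println(reader.get(new IntWritable(3), new Text()));   // hit
          System.out.println(reader.get(new IntWritable(42), new Text()));  // miss -> null
        } finally {
          reader.close();
        }
      }
    }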

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java Wed Oct 16 21:07:28 2013
@@ -50,4 +50,28 @@ public class TestBooleanWritable {
     out.flush();
     return out;
   }
+  
+  /**
+   * test {@link BooleanWritable} methods hashCode(), equals(), compareTo() 
+   */
+  @Test
+  public void testCommonMethods() {    
+    assertTrue("testCommonMethods1 error !!!", newInstance(true).equals(newInstance(true)));
+    assertTrue("testCommonMethods2 error  !!!", newInstance(false).equals(newInstance(false)));
+    assertFalse("testCommonMethods3 error !!!", newInstance(false).equals(newInstance(true)));
+    assertTrue("testCommonMethods4 error !!!", checkHashCode(newInstance(true), newInstance(true)));
+    assertFalse("testCommonMethods5 error !!! ", checkHashCode(newInstance(true), newInstance(false)));
+    assertTrue("testCommonMethods6 error !!!", newInstance(true).compareTo(newInstance(false)) > 0 );
+    assertTrue("testCommonMethods7 error !!!", newInstance(false).compareTo(newInstance(true)) < 0 );
+    assertTrue("testCommonMethods8 error !!!", newInstance(false).compareTo(newInstance(false)) == 0 );
+    assertEquals("testCommonMethods9 error !!!", "true", newInstance(true).toString());
+  }
+  
+  private boolean checkHashCode(BooleanWritable f, BooleanWritable s) {
+    return f.hashCode() == s.hashCode();
+  }    
+  
+  private static BooleanWritable newInstance(boolean flag) {
+    return new BooleanWritable(flag);
+  }
 }

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java Wed Oct 16 21:07:28 2013
@@ -133,5 +133,24 @@ public class TestBytesWritable {
     assertTrue("buffer created with (array, len) has bad length",
         zeroBuf.getLength() == copyBuf.getLength());
   }
+    
+  /**
+   * Test {@link ByteWritable} methods {@code compareTo()},
+   * {@code toString()} and {@code equals()}.
+   */
+  @Test
+  public void testObjectCommonMethods() {    
+    byte b = 0x9;
+    ByteWritable bw = new ByteWritable();
+    bw.set(b);
+    assertTrue("testSetByteWritable error", bw.get() == b);
+    assertTrue("testSetByteWritable error < 0", bw.compareTo(new ByteWritable((byte)0xA)) < 0);
+    assertTrue("testSetByteWritable error > 0", bw.compareTo(new ByteWritable((byte)0x8)) > 0);
+    assertTrue("testSetByteWritable error == 0", bw.compareTo(new ByteWritable((byte)0x9)) == 0);
+    assertTrue("testSetByteWritable equals error !!!", bw.equals(new ByteWritable((byte)0x9)));
+    assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new ByteWritable((byte)0xA)));
+    assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new IntWritable(1)));
+    assertEquals("testSetByteWritable error ", "9", bw.toString());    
+  }
+  
 }
-

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java?rev=1532910&r1=1532909&r2=1532910&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java Wed Oct 16 21:07:28 2013
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,6 +20,7 @@ package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.util.EnumSet;
+import java.util.Iterator;
 import java.lang.reflect.Type;
 
 import junit.framework.TestCase;
@@ -32,8 +33,8 @@ public class TestEnumSetWritable extends
   }
 
   EnumSet<TestEnumSet> nonEmptyFlag = EnumSet.of(TestEnumSet.APPEND);
-  EnumSetWritable<TestEnumSet> nonEmptyFlagWritable = new EnumSetWritable<TestEnumSet>(
-      nonEmptyFlag);
+  EnumSetWritable<TestEnumSet> nonEmptyFlagWritable = 
+      new EnumSetWritable<TestEnumSet>(nonEmptyFlag);
 
   @SuppressWarnings("unchecked")
   public void testSerializeAndDeserializeNonEmpty() throws IOException {
@@ -60,11 +61,12 @@ public class TestEnumSetWritable extends
     }
 
     assertTrue(
-        "Instantiate empty EnumSetWritable with no element type class providesd should throw exception.",
+        "Instantiation of empty EnumSetWritable with no element type class "
+        + "provided should throw exception.",
         gotException);
 
-    EnumSetWritable<TestEnumSet> emptyFlagWritable = new EnumSetWritable<TestEnumSet>(
-        emptyFlag, TestEnumSet.class);
+    EnumSetWritable<TestEnumSet> emptyFlagWritable = 
+        new EnumSetWritable<TestEnumSet>(emptyFlag, TestEnumSet.class);
     DataOutputBuffer out = new DataOutputBuffer();
     ObjectWritable.writeObject(out, emptyFlagWritable, emptyFlagWritable
         .getClass(), null);
@@ -86,11 +88,12 @@ public class TestEnumSetWritable extends
     }
 
     assertTrue(
-        "Instantiate empty EnumSetWritable with no element type class providesd should throw exception.",
+        "Instantiation of empty EnumSetWritable with no element type class "
+        + "provided should throw exception",
         gotException);
 
-    EnumSetWritable<TestEnumSet> nullFlagWritable = new EnumSetWritable<TestEnumSet>(
-        null, TestEnumSet.class);
+    EnumSetWritable<TestEnumSet> nullFlagWritable = 
+        new EnumSetWritable<TestEnumSet>(null, TestEnumSet.class);
 
     DataOutputBuffer out = new DataOutputBuffer();
     ObjectWritable.writeObject(out, nullFlagWritable, nullFlagWritable
@@ -105,10 +108,54 @@ public class TestEnumSetWritable extends
   public EnumSetWritable<TestEnumSet> testField;
 
   public void testAvroReflect() throws Exception {
-    String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\",\"name\":\"TestEnumSet\",\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\",\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
+    String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\","
+        + "\"name\":\"TestEnumSet\","
+        + "\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\","
+        + "\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},"
+        + "\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
     Type type =
       TestEnumSetWritable.class.getField("testField").getGenericType();
     AvroTestUtil.testReflect(nonEmptyFlagWritable, type, schema);
+  }    
+  
+  /**
+   * test {@link EnumSetWritable} equals() method
+   */
+  public void testEnumSetWritableEquals() {
+    EnumSetWritable<TestEnumSet> eset1 = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+    EnumSetWritable<TestEnumSet> eset2 = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+    assertTrue("testEnumSetWritableEquals error !!!", eset1.equals(eset2));
+    assertFalse("testEnumSetWritableEquals error !!!",
+        eset1.equals(new EnumSetWritable<TestEnumSet>(EnumSet.of(
+            TestEnumSet.APPEND, TestEnumSet.CREATE, TestEnumSet.OVERWRITE),
+            TestEnumSet.class)));
+    assertTrue("testEnumSetWritableEquals getElementType error !!!", eset1
+        .getElementType().equals(TestEnumSet.class));
   }
+  
+  /**
+   * Test {@code EnumSetWritable.write(DataOutputBuffer out)} followed by
+   * {@code readFields()}, iterating the restored set with {@code iterator()}.
+   */
+  public void testEnumSetWritableWriteRead() throws Exception {
+    EnumSetWritable<TestEnumSet> srcSet = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+    DataOutputBuffer out = new DataOutputBuffer();
+    srcSet.write(out);
+
+    EnumSetWritable<TestEnumSet> dstSet = new EnumSetWritable<TestEnumSet>();
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(out.getData(), out.getLength());
+    dstSet.readFields(in);
 
+    EnumSet<TestEnumSet> result = dstSet.get();
+    Iterator<TestEnumSet> dstIter = result.iterator();
+    Iterator<TestEnumSet> srcIter = srcSet.iterator();
+    while (dstIter.hasNext() && srcIter.hasNext()) {
+      assertEquals("testEnumSetWritableWriteRead error !!!", dstIter.next(),
+          srcIter.next());
+    }
+  }
 }