Posted to common-commits@hadoop.apache.org by wa...@apache.org on 2014/07/15 23:10:28 UTC

svn commit: r1610853 - in /hadoop/common/branches/fs-encryption/hadoop-common-project: hadoop-common/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/crypto/key/ hadoop-common/src/main/java/org/apache/hadoop/fs/permission/ ha...

Author: wang
Date: Tue Jul 15 21:10:24 2014
New Revision: 1610853

URL: http://svn.apache.org/r1610853
Log:
Merge from trunk to branch

Modified:
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderExtension.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java
    hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt Tue Jul 15 21:10:24 2014
@@ -177,6 +177,11 @@ Trunk (Unreleased)
     HADOOP-10769. Create KeyProvider extension to handle delegation tokens.
     (Arun Suresh via atm)
 
+    HADOOP-10812. Delegate KeyProviderExtension#toString to underlying
+    KeyProvider. (wang)
+
+    HADOOP-10736. Add key attributes to the key shell. (Mike Yoder via wang)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -374,6 +379,10 @@ Trunk (Unreleased)
     NativeAzureFileSystem#NativeAzureFsInputStream#close().
     (Chen He via cnauroth)
 
+    HADOOP-10831. UserProvider is not thread safe. (Benoy Antony via umamahesh)
+
+    HADOOP-10834. Typo in CredentialShell usage. (Benoy Antony via umamahesh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -388,10 +397,25 @@ Release 2.6.0 - UNRELEASED
 
   IMPROVEMENTS
 
+    HADOOP-10808. Remove unused native code for munlock. (cnauroth)
+
+    HADOOP-10815. Implement Windows equivalent of mlock. (cnauroth)
+
   OPTIMIZATIONS
 
   BUG FIXES
 
+    HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
+    Sivachenko via Colin Patrick McCabe)
+
+    HADOOP-10507. FsShell setfacl can throw ArrayIndexOutOfBoundsException when
+    no perm is specified. (Stephen Chu and Sathish Gurram via cnauroth)
+
+    HADOOP-10780. hadoop_user_info_alloc fails on FreeBSD due to incorrect
+    sysconf use (Dmitry Sivachenko via Colin Patrick McCabe)
+
+    HADOOP-10810. Clean up native code compilation warnings. (cnauroth)
+
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -679,6 +703,8 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10419 BufferedFSInputStream NPEs on getPos() on a closed stream
     (stevel)
 
+    HADOOP-10801 dead link in site.xml (Akira AJISAKA via stevel)
+
   BREAKDOWN OF HADOOP-10514 SUBTASKS AND RELATED JIRAS
 
     HADOOP-10520. Extended attributes definition and FileSystem APIs for

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-2.5/hadoop-common-project/hadoop-common/CHANGES.txt:r1609091
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1608601-1610850

Propchange: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1608601-1610850

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java Tue Jul 15 21:10:24 2014
@@ -23,9 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
-import java.net.URI;
 import java.security.NoSuchAlgorithmException;
-import java.text.MessageFormat;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
@@ -37,7 +35,6 @@ import com.google.gson.stream.JsonWriter
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 
 import javax.crypto.KeyGenerator;
 
@@ -137,9 +134,26 @@ public abstract class KeyProvider {
     }
 
     public String toString() {
-      return MessageFormat.format(
-          "cipher: {0}, length: {1} description: {2} created: {3} version: {4}",
-          cipher, bitLength, description, created, versions);
+      final StringBuilder metaSB = new StringBuilder();
+      metaSB.append("cipher: ").append(cipher).append(", ");
+      metaSB.append("length: ").append(bitLength).append(", ");
+      metaSB.append("description: ").append(description).append(", ");
+      metaSB.append("created: ").append(created).append(", ");
+      metaSB.append("version: ").append(versions).append(", ");
+      metaSB.append("attributes: ");
+      if ((attributes != null) && !attributes.isEmpty()) {
+        for (Map.Entry<String, String> attribute : attributes.entrySet()) {
+          metaSB.append("[");
+          metaSB.append(attribute.getKey());
+          metaSB.append("=");
+          metaSB.append(attribute.getValue());
+          metaSB.append("], ");
+        }
+        metaSB.deleteCharAt(metaSB.length() - 2);  // remove last ', '
+      } else {
+        metaSB.append("null");
+      }
+      return metaSB.toString();
     }
 
     public String getDescription() {

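For illustration, a minimal standalone sketch (not part of this patch) of the format the new Metadata#toString produces; the cipher and length values here are made up:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class MetadataToStringDemo {
      public static void main(String[] args) {
        final Map<String, String> attributes = new LinkedHashMap<String, String>();
        attributes.put("foo", "bar");
        attributes.put("abc", "def");

        // Mirrors the StringBuilder logic above: comma-separated fields,
        // with attributes rendered as [key=value] pairs or "null" if absent.
        final StringBuilder metaSB = new StringBuilder();
        metaSB.append("cipher: ").append("AES/CTR/NoPadding").append(", ");
        metaSB.append("length: ").append(256).append(", ");
        metaSB.append("attributes: ");
        if (!attributes.isEmpty()) {
          for (Map.Entry<String, String> attribute : attributes.entrySet()) {
            metaSB.append("[").append(attribute.getKey()).append("=")
                .append(attribute.getValue()).append("], ");
          }
          metaSB.deleteCharAt(metaSB.length() - 2);  // drop the trailing comma
        } else {
          metaSB.append("null");
        }
        // Prints: cipher: AES/CTR/NoPadding, length: 256, attributes: [foo=bar], [abc=def]
        // (with a trailing space left behind by deleteCharAt)
        System.out.println(metaSB.toString());
      }
    }
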
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderExtension.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderExtension.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderExtension.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderExtension.java Tue Jul 15 21:10:24 2014
@@ -120,4 +120,9 @@ public abstract class KeyProviderExtensi
   public void flush() throws IOException {
     keyProvider.flush();
   }
+
+  @Override
+  public String toString() {
+    return getClass().getSimpleName() + ": " + keyProvider.toString();
+  }
 }

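Combined with the Metadata change above, a wrapped provider now reports both the extension type and the underlying store. A tiny self-contained sketch of the delegation pattern (class names here are illustrative, not the Hadoop classes):

    public class DelegatingToStringDemo {
      static class BackingProvider {
        @Override
        public String toString() { return "jceks://file/tmp/keystore.jceks"; }
      }
      static class CachingExtension {
        private final BackingProvider keyProvider = new BackingProvider();
        @Override
        public String toString() {
          return getClass().getSimpleName() + ": " + keyProvider.toString();
        }
      }
      public static void main(String[] args) {
        // Prints: CachingExtension: jceks://file/tmp/keystore.jceks
        System.out.println(new CachingExtension());
      }
    }
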
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java Tue Jul 15 21:10:24 2014
@@ -22,7 +22,9 @@ import java.io.IOException;
 import java.io.PrintStream;
 import java.security.InvalidParameterException;
 import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -90,6 +92,7 @@ public class KeyShell extends Configured
    */
   private int init(String[] args) throws IOException {
     final Options options = KeyProvider.options(getConf());
+    final Map<String, String> attributes = new HashMap<String, String>();
 
     for (int i = 0; i < args.length; i++) { // parse command line
       boolean moreTokens = (i < args.length - 1);
@@ -134,6 +137,23 @@ public class KeyShell extends Configured
         options.setCipher(args[++i]);
       } else if ("--description".equals(args[i]) && moreTokens) {
         options.setDescription(args[++i]);
+      } else if ("--attr".equals(args[i]) && moreTokens) {
+        final String attrval[] = args[++i].split("=", 2);
+        final String attr = attrval[0].trim();
+        final String val = attrval[1].trim();
+        if (attr.isEmpty() || val.isEmpty()) {
+          out.println("\nAttributes must be in attribute=value form, " +
+                  "or quoted\nlike \"attribute = value\"\n");
+          printKeyShellUsage();
+          return -1;
+        }
+        if (attributes.containsKey(attr)) {
+          out.println("\nEach attribute must correspond to only one value:\n" +
+                  "atttribute \"" + attr + "\" was repeated\n" );
+          printKeyShellUsage();
+          return -1;
+        }
+        attributes.put(attr, val);
       } else if ("--provider".equals(args[i]) && moreTokens) {
         userSuppliedProvider = true;
         getConf().set(KeyProviderFactory.KEY_PROVIDER_PATH, args[++i]);
@@ -156,6 +176,10 @@ public class KeyShell extends Configured
       return -1;
     }
 
+    if (!attributes.isEmpty()) {
+      options.setAttributes(attributes);
+    }
+
     return 0;
   }
 
@@ -404,6 +428,7 @@ public class KeyShell extends Configured
     public static final String USAGE =
       "create <keyname> [--cipher <cipher>] [--size <size>]\n" +
       "                     [--description <description>]\n" +
+      "                     [--attr <attribute=value>]\n" +
       "                     [--provider <provider>] [--help]";
     public static final String DESC =
       "The create subcommand creates a new key for the name specified\n" +
@@ -411,7 +436,9 @@ public class KeyShell extends Configured
       "--provider argument. You may specify a cipher with the --cipher\n" +
       "argument. The default cipher is currently \"AES/CTR/NoPadding\".\n" +
       "The default keysize is 256. You may specify the requested key\n" +
-      "length using the --size argument.\n";
+      "length using the --size argument. Arbitrary attribute=value\n" +
+      "style attributes may be specified using the --attr argument.\n" +
+      "--attr may be specified multiple times, once per attribute.\n";
 
     final String keyName;
     final Options options;

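The --attr parsing above splits on the first '=' only (String.split with a limit of 2), which is what lets a value itself contain '='. A standalone sketch (not part of the patch) of the cases the new tests exercise:

    public class AttrSplitDemo {
      public static void main(String[] args) {
        // Limit 2: everything after the first '=' belongs to the value.
        String[] kv = "a=b=c".split("=", 2);
        System.out.println(kv[0] + " -> " + kv[1]);               // a -> b=c

        // Whitespace survives the split, but both sides are trimmed.
        kv = "foo = bar".split("=", 2);
        System.out.println(kv[0].trim() + " -> " + kv[1].trim()); // foo -> bar

        // "=bar" produces an empty attribute, which the shell rejects.
        kv = "=bar".split("=", 2);
        System.out.println("empty attr? " + kv[0].trim().isEmpty()); // true
      }
    }
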
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java Tue Jul 15 21:10:24 2014
@@ -278,7 +278,7 @@ public class AclEntry {
     }
 
     if (includePermission) {
-      if (split.length < index) {
+      if (split.length <= index) {
         throw new HadoopIllegalArgumentException("Invalid <aclSpec> : "
             + aclStr);
       }

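The one-character change above is a bounds check: the code is about to read split[index], which is only safe while index < split.length. For an ACL spec that omits the permission, such as "user:user1:" from the HADOOP-10507 report, String.split drops the trailing empty token, and the old "<" comparison let execution fall through to an ArrayIndexOutOfBoundsException. A minimal sketch (the index value is illustrative):

    public class AclBoundsDemo {
      public static void main(String[] args) {
        // "user:user1:" splits into ["user", "user1"]; the trailing empty
        // string is discarded, so there is no element for the permission.
        String[] split = "user:user1:".split(":");
        int index = 2;  // position where the permission would be read
        if (split.length <= index) {  // the corrected check
          throw new IllegalArgumentException("Invalid <aclSpec> : user:user1:");
        }
        System.out.println(split[index]);  // unreachable for this input
      }
    }
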
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Tue Jul 15 21:10:24 2014
@@ -292,8 +292,6 @@ public class NativeIO {
 
     static native void mlock_native(
         ByteBuffer buffer, long len) throws NativeIOException;
-    static native void munlock_native(
-        ByteBuffer buffer, long len) throws NativeIOException;
 
     /**
      * Locks the provided direct ByteBuffer into memory, preventing it from
@@ -312,23 +310,6 @@ public class NativeIO {
       }
       mlock_native(buffer, len);
     }
-
-    /**
-     * Unlocks a locked direct ByteBuffer, allowing it to swap out of memory.
-     * This is a no-op if the ByteBuffer was not previously locked.
-     * 
-     * See the munlock(2) man page for more information.
-     * 
-     * @throws NativeIOException
-     */
-    public static void munlock(ByteBuffer buffer, long len)
-        throws IOException {
-      assertCodeLoaded();
-      if (!buffer.isDirect()) {
-        throw new IOException("Cannot munlock a non-direct ByteBuffer");
-      }
-      munlock_native(buffer, len);
-    }
     
     /**
      * Unmaps the block from memory. See munmap(2).
@@ -570,6 +551,19 @@ public class NativeIO {
       return access0(path, desiredAccess.accessRight());
     }
 
+    /**
+     * Extends both the minimum and maximum working set size of the current
+     * process.  This method gets the current minimum and maximum working set
+     * size, adds the requested amount to each and then sets the minimum and
+     * maximum working set size to the new values.  Controlling the working set
+     * size of the process also controls the amount of memory it can lock.
+     *
+     * @param delta amount to increment minimum and maximum working set size
+     * @throws IOException for any error
+     * @see POSIX#mlock(ByteBuffer, long)
+     */
+    public static native void extendWorkingSetSize(long delta) throws IOException;
+
     static {
       if (NativeCodeLoader.isNativeCodeLoaded()) {
         try {

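A hedged usage sketch for the new method: a caller that intends to mlock a direct buffer on Windows would first grow the working set by at least the buffer size. The OS check and sizes here are illustrative, not Hadoop's actual call site:

    import java.io.IOException;
    import java.nio.ByteBuffer;

    import org.apache.hadoop.io.nativeio.NativeIO;

    public class MlockWindowsDemo {
      public static void main(String[] args) throws IOException {
        final long bufSize = 4L * 1024 * 1024;
        final ByteBuffer buf = ByteBuffer.allocateDirect((int) bufSize);
        if (System.getProperty("os.name").startsWith("Windows")) {
          // VirtualLock can only pin pages that fit within the working
          // set, so raise both limits before locking.
          NativeIO.Windows.extendWorkingSetSize(bufSize);
        }
        // With this patch, mlock maps to mlock(2) on Unix and to
        // VirtualLock on Windows.
        NativeIO.POSIX.mlock(buf, bufSize);
      }
    }
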
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java Tue Jul 15 21:10:24 2014
@@ -85,7 +85,7 @@ class MetricsConfig extends SubsetConfig
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix, ".");
+    super(c, prefix.toLowerCase(Locale.US), ".");
   }
 
   static MetricsConfig create(String prefix) {

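The prefix here comes from the metrics system name (e.g. "Test" from new MetricsSystemImpl("Test")), so lower-casing it normalizes configuration lookups; that is why the test configurations later in this commit switch from "Test.sink..." keys to "test.sink...". Pinning Locale.US guards against locale-sensitive case rules. A standalone sketch:

    import java.util.Locale;

    public class PrefixCaseDemo {
      public static void main(String[] args) {
        System.out.println("Test".toLowerCase(Locale.US));  // test

        // Why not the default locale: under tr-TR, 'I' lower-cases to a
        // dotless 'ı', so "SINK" would not become "sink".
        System.out.println("SINK".toLowerCase(new Locale("tr", "TR")));  // sınk
      }
    }
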
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java Tue Jul 15 21:10:24 2014
@@ -29,6 +29,8 @@ import org.apache.hadoop.classification.
  * abstraction to separate credential storage from users of them. It
  * is intended to support getting or storing passwords in a variety of ways,
  * including third party bindings.
+ * 
+ * <code>CredentialProvider</code> implementations must be thread safe.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java Tue Jul 15 21:10:24 2014
@@ -264,7 +264,7 @@ public class CredentialShell extends Con
                   alias + " from CredentialProvider " + provider.toString() +
                   ". Continue?:");
           if (!cont) {
-            out.println("Nothing has been be deleted.");
+            out.println("Nothing has been deleted.");
           }
           return cont;
         } catch (IOException e) {

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java Tue Jul 15 21:10:24 2014
@@ -55,7 +55,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public CredentialEntry getCredentialEntry(String alias) {
+  public synchronized CredentialEntry getCredentialEntry(String alias) {
     byte[] bytes = credentials.getSecretKey(new Text(alias));
     if (bytes == null) {
       return null;
@@ -64,7 +64,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public CredentialEntry createCredentialEntry(String name, char[] credential) 
+  public synchronized CredentialEntry createCredentialEntry(String name, char[] credential) 
       throws IOException {
     Text nameT = new Text(name);
     if (credentials.getSecretKey(nameT) != null) {
@@ -77,7 +77,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public void deleteCredentialEntry(String name) throws IOException {
+  public synchronized void deleteCredentialEntry(String name) throws IOException {
     byte[] cred = credentials.getSecretKey(new Text(name));
     if (cred != null) {
       credentials.removeSecretKey(new Text(name));
@@ -95,7 +95,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public void flush() {
+  public synchronized void flush() {
     user.addCredentials(credentials);
   }
 
@@ -112,7 +112,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public List<String> getAliases() throws IOException {
+  public synchronized List<String> getAliases() throws IOException {
     List<String> list = new ArrayList<String>();
     List<Text> aliases = credentials.getAllSecretKeys();
     for (Text key : aliases) {

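For context on HADOOP-10831: the methods above share one Credentials object, and createCredentialEntry is a check-then-act sequence, so two unsynchronized threads could both pass the "already exists" check. A self-contained sketch of the race being closed (names are illustrative, not the Hadoop classes):

    import java.util.HashMap;
    import java.util.Map;

    public class CheckThenActDemo {
      private final Map<String, char[]> credentials = new HashMap<String, char[]>();

      // Without synchronized, two threads can interleave between the
      // containsKey check and the put, and both appear to succeed.
      public synchronized void createCredentialEntry(String name, char[] cred) {
        if (credentials.containsKey(name)) {
          throw new IllegalStateException("Credential " + name + " already exists");
        }
        credentials.put(name, cred);
      }

      public static void main(String[] args) throws InterruptedException {
        final CheckThenActDemo p = new CheckThenActDemo();
        Runnable r = new Runnable() {
          public void run() {
            try {
              p.createCredentialEntry("alias", "secret".toCharArray());
            } catch (IllegalStateException e) {
              System.out.println(e.getMessage());
            }
          }
        };
        Thread t1 = new Thread(r), t2 = new Thread(r);
        t1.start(); t2.start();
        t1.join(); t2.join();
        // Exactly one thread creates the entry; the other sees the duplicate.
      }
    }
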
Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Tue Jul 15 21:10:24 2014
@@ -379,6 +379,7 @@ Java_org_apache_hadoop_io_compress_zlib_
       return (*env)->NewStringUTF(env, dl_info.dli_fname);
     }
   }
+  return (*env)->NewStringUTF(env, "Unavailable");
 #endif
 
 #ifdef WINDOWS

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Tue Jul 15 21:10:24 2014
@@ -388,10 +388,10 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz,
   jobject buffer, jlong len)
 {
-#ifdef UNIX
   void* buf = (void*)(*env)->GetDirectBufferAddress(env, buffer);
   PASS_EXCEPTIONS(env);
 
+#ifdef UNIX
   if (mlock(buf, len)) {
     CHECK_DIRECT_BUFFER_ADDRESS(buf);
     throw_ioe(env, errno);
@@ -399,37 +399,11 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #endif
 
 #ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.mlock_native() is not supported on Windows");
-#endif
-}
-
-/**
- * public static native void munlock_native(
- *   ByteBuffer buffer, long offset);
- *
- * The "00024" in the function name is an artifact of how JNI encodes
- * special characters. U+0024 is '$'.
- */
-JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_munlock_1native(
-  JNIEnv *env, jclass clazz,
-  jobject buffer, jlong len)
-{
-#ifdef UNIX
-  void* buf = (void*)(*env)->GetDirectBufferAddress(env, buffer);
-  PASS_EXCEPTIONS(env);
-
-  if (munlock(buf, len)) {
+  if (!VirtualLock(buf, len)) {
     CHECK_DIRECT_BUFFER_ADDRESS(buf);
-    throw_ioe(env, errno);
+    throw_ioe(env, GetLastError());
   }
 #endif
-
-#ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.munlock_native() is not supported on Windows");
-#endif
 }
 
 #ifdef __FreeBSD__
@@ -606,6 +580,8 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jint uid)
 {
 #ifdef UNIX
+  jstring jstr_username = NULL;
+  char *pw_buf = NULL;
   int pw_lock_locked = 0;
   if (pw_lock_object != NULL) {
     if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
@@ -614,7 +590,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     pw_lock_locked = 1;
   }
 
-  char *pw_buf = NULL;
   int rc;
   size_t pw_buflen = get_pw_buflen();
   if ((pw_buf = malloc(pw_buflen)) == NULL) {
@@ -649,7 +624,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     goto cleanup;
   }
 
-  jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
+  jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
 
 cleanup:
   if (pw_lock_locked) {
@@ -690,7 +665,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #ifdef WINDOWS
   THROW(env, "java/io/IOException",
     "The function POSIX.mmap() is not supported on Windows");
-  return NULL;
+  return (jlong)(intptr_t)NULL;
 #endif
 }
 
@@ -710,7 +685,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #ifdef WINDOWS
   THROW(env, "java/io/IOException",
     "The function POSIX.munmap() is not supported on Windows");
-  return NULL;
 #endif
 }
 
@@ -726,6 +700,8 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jint gid)
 {
 #ifdef UNIX
+  jstring jstr_groupname = NULL;
+  char *pw_buf = NULL;
   int pw_lock_locked = 0;
  
   if (pw_lock_object != NULL) {
@@ -735,7 +711,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     pw_lock_locked = 1;
   }
   
-  char *pw_buf = NULL;
   int rc;
   size_t pw_buflen = get_pw_buflen();
   if ((pw_buf = malloc(pw_buflen)) == NULL) {
@@ -770,7 +745,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     goto cleanup;
   }
 
-  jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
+  jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
   PASS_EXCEPTIONS_GOTO(env, cleanup);
   
 cleanup:
@@ -948,7 +923,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #ifdef UNIX
   THROW(env, "java/io/IOException",
     "The function setFilePointer(FileDescriptor) is not supported on Unix");
-  return NULL;
+  return (jlong)(intptr_t)NULL;
 #endif
 
 #ifdef WINDOWS
@@ -983,7 +958,7 @@ JNIEXPORT jboolean JNICALL Java_org_apac
 #ifdef UNIX
   THROW(env, "java/io/IOException",
     "The function access0(path, access) is not supported on Unix");
-  return NULL;
+  return (jlong)(intptr_t)NULL;
 #endif
 
 #ifdef WINDOWS
@@ -1008,6 +983,40 @@ cleanup:
 #endif
 }
 
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method:    extendWorkingSetSize
+ * Signature: (J)V
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_extendWorkingSetSize(
+  JNIEnv *env, jclass clazz, jlong delta)
+{
+#ifdef UNIX
+  THROW(env, "java/io/IOException",
+    "The function extendWorkingSetSize(delta) is not supported on Unix");
+#endif
+
+#ifdef WINDOWS
+  SIZE_T min, max;
+  HANDLE hProcess = GetCurrentProcess();
+  if (!GetProcessWorkingSetSize(hProcess, &min, &max)) {
+    throw_ioe(env, GetLastError());
+    return;
+  }
+  if (!SetProcessWorkingSetSizeEx(hProcess, min + delta, max + delta,
+      QUOTA_LIMITS_HARDWS_MIN_DISABLE | QUOTA_LIMITS_HARDWS_MAX_DISABLE)) {
+    throw_ioe(env, GetLastError());
+    return;
+  }
+  // There is no need to call CloseHandle on the pseudo-handle returned from
+  // GetCurrentProcess.
+#endif
+}
+
 JNIEXPORT void JNICALL 
 Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env, 
 jclass clazz, jstring jsrc, jstring jdst)

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c Tue Jul 15 21:10:24 2014
@@ -120,17 +120,19 @@ Java_org_apache_hadoop_net_unix_DomainSo
 JNIEnv *env, jobject obj, jint fd)
 {
   struct fd_set_data *sd;
-  struct pollfd *pollfd, *last_pollfd;
+  struct pollfd *pollfd = NULL, *last_pollfd;
   int used_size, i;
 
   sd = (struct fd_set_data*)(intptr_t)(*env)->
       GetLongField(env, obj, fd_set_data_fid);
   used_size = sd->used_size;
   for (i = 0; i < used_size; i++) {
-    pollfd = sd->pollfd + i;
-    if (pollfd->fd == fd) break;
+    if (sd->pollfd[i].fd == fd) {
+      pollfd = sd->pollfd + i;
+      break;
+    }
   }
-  if (i == used_size) {
+  if (pollfd == NULL) {
     (*env)->Throw(env, newRuntimeException(env, "failed to remove fd %d "
           "from the FdSet because it was never present.", fd));
     return;

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c Tue Jul 15 21:10:24 2014
@@ -45,7 +45,7 @@ static void throw_ioexception(JNIEnv* en
     FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
     NULL, *(DWORD*) (&errnum), // reinterpret cast
     MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
-    (LPSTR*)&buffer, 0, NULL);
+    buffer, 0, NULL);
 
   if (len > 0)
   {

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c Tue Jul 15 21:10:24 2014
@@ -36,7 +36,7 @@
 struct hadoop_user_info *hadoop_user_info_alloc(void)
 {
   struct hadoop_user_info *uinfo;
-  size_t buf_sz;
+  long buf_sz;
   char *buf;
 
   uinfo = calloc(1, sizeof(struct hadoop_user_info));
@@ -193,7 +193,7 @@ int hadoop_user_info_getgroups(struct ha
   ngroups = uinfo->gids_size;
   ret = getgrouplist(uinfo->pwd.pw_name, uinfo->pwd.pw_gid, 
                          uinfo->gids, &ngroups);
-  if (ret > 0) {
+  if (ret >= 0) {
     uinfo->num_gids = ngroups;
     ret = put_primary_gid_first(uinfo);
     if (ret) {

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Tue Jul 15 21:10:24 2014
@@ -17,35 +17,41 @@
  */
 package org.apache.hadoop.crypto.key;
 
-import static org.junit.Assert.*;
-
 import java.io.ByteArrayOutputStream;
 import java.io.File;
+import java.io.IOException;
 import java.io.PrintStream;
 import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 public class TestKeyShell {
   private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
   private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
 
-  private static File tmpDir;
-
   private PrintStream initialStdOut;
   private PrintStream initialStdErr;
 
+  /* The default JCEKS provider - for testing purposes */
+  private String jceksProvider;
+
   @Before
   public void setup() throws Exception {
     outContent.reset();
     errContent.reset();
-    tmpDir = new File(System.getProperty("test.build.data", "target"),
+    final File tmpDir = new File(System.getProperty("test.build.data", "target"),
         UUID.randomUUID().toString());
-    tmpDir.mkdirs();
+    if (!tmpDir.mkdirs()) {
+      throw new IOException("Unable to create " + tmpDir);
+    }
+    jceksProvider = "jceks://file" + tmpDir + "/keystore.jceks";
     initialStdOut = System.out;
     initialStdErr = System.err;
     System.setOut(new PrintStream(outContent));
@@ -58,65 +64,80 @@ public class TestKeyShell {
     System.setErr(initialStdErr);
   }
 
+  /**
+   * Delete a key from the default jceksProvider
+   * @param ks The KeyShell instance
+   * @param keyName The key to delete
+   * @throws Exception
+   */
+  private void deleteKey(KeyShell ks, String keyName) throws Exception {
+    int rc;
+    outContent.reset();
+    final String[] delArgs = {"delete", keyName, "--provider", jceksProvider};
+    rc = ks.run(delArgs);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully deleted."));
+  }
+
+  /**
+   * Lists the keys in the jceksProvider
+   * @param ks The KeyShell instance
+   * @param wantMetadata True if you want metadata returned with the keys
+   * @return The output from the "list" call
+   * @throws Exception
+   */
+  private String listKeys(KeyShell ks, boolean wantMetadata) throws Exception {
+    int rc;
+    outContent.reset();
+    final String[] listArgs = {"list", "--provider", jceksProvider };
+    final String[] listArgsM = {"list", "--metadata", "--provider", jceksProvider };
+    rc = ks.run(wantMetadata ? listArgsM : listArgs);
+    assertEquals(0, rc);
+    return outContent.toString();
+  }
+
   @Test
   public void testKeySuccessfulKeyLifecycle() throws Exception {
-    outContent.reset();
-    String[] args1 = {"create", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
     int rc = 0;
+    String keyName = "key1";
+
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
-    rc = ks.run(args1);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"created."));
 
     outContent.reset();
-    String[] args2 = {"list", "--provider",
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2);
+    final String[] args1 = {"create", keyName, "--provider", jceksProvider};
+    rc = ks.run(args1);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1"));
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully created."));
 
-    outContent.reset();
-    String[] args2a = {"list", "--metadata", "--provider",
-                      "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2a);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1"));
-    assertTrue(outContent.toString().contains("description"));
-    assertTrue(outContent.toString().contains("created"));
+    String listOut = listKeys(ks, false);
+    assertTrue(listOut.contains(keyName));
+
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains(keyName));
+    assertTrue(listOut.contains("description"));
+    assertTrue(listOut.contains("created"));
 
     outContent.reset();
-    String[] args3 = {"roll", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args3);
+    final String[] args2 = {"roll", keyName, "--provider", jceksProvider};
+    rc = ks.run(args2);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("key1 has been successfully " +
     		"rolled."));
 
-    outContent.reset();
-    String[] args4 = {"delete", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args4);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"deleted."));
+    deleteKey(ks, keyName);
 
-    outContent.reset();
-    String[] args5 = {"list", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args5);
-    assertEquals(0, rc);
-    assertFalse(outContent.toString(), outContent.toString().contains("key1"));
+    listOut = listKeys(ks, false);
+    assertFalse(listOut, listOut.contains(keyName));
   }
   
   /* HADOOP-10586 KeyShell didn't allow -description. */
   @Test
   public void testKeySuccessfulCreationWithDescription() throws Exception {
     outContent.reset();
-    String[] args1 = {"create", "key1", "--provider",
-                      "jceks://file" + tmpDir + "/keystore.jceks",
+    final String[] args1 = {"create", "key1", "--provider", jceksProvider,
                       "--description", "someDescription"};
     int rc = 0;
     KeyShell ks = new KeyShell();
@@ -126,20 +147,16 @@ public class TestKeyShell {
     assertTrue(outContent.toString().contains("key1 has been successfully " +
         "created."));
 
-    outContent.reset();
-    String[] args2a = {"list", "--metadata", "--provider",
-                      "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2a);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("description"));
-    assertTrue(outContent.toString().contains("someDescription"));
+    String listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("description"));
+    assertTrue(listOut.contains("someDescription"));
   }
 
   @Test
   public void testInvalidKeySize() throws Exception {
-    String[] args1 = {"create", "key1", "--size", "56", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    
+    final String[] args1 = {"create", "key1", "--size", "56", "--provider",
+            jceksProvider};
+
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
@@ -150,9 +167,9 @@ public class TestKeyShell {
 
   @Test
   public void testInvalidCipher() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    
+    final String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider",
+            jceksProvider};
+
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
@@ -163,7 +180,7 @@ public class TestKeyShell {
 
   @Test
   public void testInvalidProvider() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES", "--provider", 
+    final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
       "sdff://file/tmp/keystore.jceks"};
     
     int rc = 0;
@@ -177,7 +194,7 @@ public class TestKeyShell {
 
   @Test
   public void testTransientProviderWarning() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES", "--provider", 
+    final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
       "user:///"};
     
     int rc = 0;
@@ -191,7 +208,7 @@ public class TestKeyShell {
   
   @Test
   public void testTransientProviderOnlyConfig() throws Exception {
-    String[] args1 = {"create", "key1"};
+    final String[] args1 = {"create", "key1"};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
@@ -206,23 +223,96 @@ public class TestKeyShell {
 
   @Test
   public void testFullCipher() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES/CBC/pkcs5Padding", 
-        "--provider", "jceks://file" + tmpDir + "/keystore.jceks"};
+    final String keyName = "key1";
+    final String[] args1 = {"create", keyName, "--cipher", "AES/CBC/pkcs5Padding",
+        "--provider", jceksProvider};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"created."));
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully " +	"created."));
+
+    deleteKey(ks, keyName);
+  }
+
+  @Test
+  public void testAttributes() throws Exception {
+    int rc;
+    KeyShell ks = new KeyShell();
+    ks.setConf(new Configuration());
+
+    /* Simple creation test */
+    final String[] args1 = {"create", "keyattr1", "--provider", jceksProvider,
+            "--attr", "foo=bar"};
+    rc = ks.run(args1);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString().contains("keyattr1 has been " +
+            "successfully " + "created."));
+
+    /* ...and list to see that we have the attr */
+    String listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr1"));
+    assertTrue(listOut.contains("attributes: [foo=bar]"));
 
+    /* Negative tests: no attribute */
     outContent.reset();
-    String[] args2 = {"delete", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
+    final String[] args2 = {"create", "keyattr2", "--provider", jceksProvider,
+            "--attr", "=bar"};
+    rc = ks.run(args2);
+    assertEquals(-1, rc);
+
+    /* Not in attribute = value form */
+    outContent.reset();
+    args2[5] = "foo";
+    rc = ks.run(args2);
+    assertEquals(-1, rc);
+
+    /* No attribute or value */
+    outContent.reset();
+    args2[5] = "=";
+    rc = ks.run(args2);
+    assertEquals(-1, rc);
+
+    /* Legal: attribute is a, value is b=c */
+    outContent.reset();
+    args2[5] = "a=b=c";
     rc = ks.run(args2);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"deleted."));
+
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr2"));
+    assertTrue(listOut.contains("attributes: [a=b=c]"));
+
+    /* Test several attrs together... */
+    outContent.reset();
+    final String[] args3 = {"create", "keyattr3", "--provider", jceksProvider,
+            "--attr", "foo = bar",
+            "--attr", " glarch =baz  ",
+            "--attr", "abc=def"};
+    rc = ks.run(args3);
+    assertEquals(0, rc);
+
+    /* ...and list to ensure they're there. */
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr3"));
+    assertTrue(listOut.contains("[foo=bar]"));
+    assertTrue(listOut.contains("[glarch=baz]"));
+    assertTrue(listOut.contains("[abc=def]"));
+
+    /* Negative test - repeated attributes should fail */
+    outContent.reset();
+    final String[] args4 = {"create", "keyattr4", "--provider", jceksProvider,
+            "--attr", "foo=bar",
+            "--attr", "foo=glarch"};
+    rc = ks.run(args4);
+    assertEquals(-1, rc);
+
+    /* Clean up to be a good citizen */
+    deleteKey(ks, "keyattr1");
+    deleteKey(ks, "keyattr2");
+    deleteKey(ks, "keyattr3");
   }
 }

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java Tue Jul 15 21:10:24 2014
@@ -84,6 +84,19 @@ public class TestAclCommands {
   }
 
   @Test
+  public void testSetfaclValidationsWithoutPermissions() throws Exception {
+    List<AclEntry> parsedList = new ArrayList<AclEntry>();
+    try {
+      parsedList = AclEntry.parseAclSpec("user:user1:", true);
+    } catch (IllegalArgumentException e) {
+    }
+    assertTrue(parsedList.size() == 0);
+    assertFalse("setfacl should fail with less arguments",
+        0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:",
+            "/path" }));
+  }
+
+  @Test
   public void testMultipleAclSpecParsing() throws Exception {
     List<AclEntry> parsedList = AclEntry.parseAclSpec(
         "group::rwx,user:user1:rwx,user:user2:rw-,"

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Tue Jul 15 21:10:24 2014
@@ -49,7 +49,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 
 public class TestNativeIO {
@@ -572,7 +571,6 @@ public class TestNativeIO {
   @Test(timeout=10000)
   public void testMlock() throws Exception {
     assumeTrue(NativeIO.isAvailable());
-    assumeTrue(Shell.LINUX);
     final File TEST_FILE = new File(new File(
         System.getProperty("test.build.data","build/test/data")),
         "testMlockFile");
@@ -607,8 +605,8 @@ public class TestNativeIO {
         sum += mapbuf.get(i);
       }
       assertEquals("Expected sums to be equal", bufSum, sum);
-      // munlock the buffer
-      NativeIO.POSIX.munlock(mapbuf, fileSize);
+      // munmap the buffer, which also implicitly unlocks it
+      NativeIO.POSIX.munmap(mapbuf);
     } finally {
       if (channel != null) {
         channel.close();

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java Tue Jul 15 21:10:24 2014
@@ -60,12 +60,12 @@ public class TestGangliaMetrics {
   @Test
   public void testTagsForPrefix() throws Exception {
     ConfigBuilder cb = new ConfigBuilder()
-      .add("Test.sink.ganglia.tagsForPrefix.all", "*")
-      .add("Test.sink.ganglia.tagsForPrefix.some", "NumActiveSinks, " +
+      .add("test.sink.ganglia.tagsForPrefix.all", "*")
+      .add("test.sink.ganglia.tagsForPrefix.some", "NumActiveSinks, " +
               "NumActiveSources")
-      .add("Test.sink.ganglia.tagsForPrefix.none", "");
+      .add("test.sink.ganglia.tagsForPrefix.none", "");
     GangliaSink30 sink = new GangliaSink30();
-    sink.init(cb.subset("Test.sink.ganglia"));
+    sink.init(cb.subset("test.sink.ganglia"));
 
     List<MetricsTag> tags = new ArrayList<MetricsTag>();
     tags.add(new MetricsTag(MsInfo.Context, "all"));
@@ -98,8 +98,8 @@ public class TestGangliaMetrics {
   
   @Test public void testGangliaMetrics2() throws Exception {
     ConfigBuilder cb = new ConfigBuilder().add("default.period", 10)
-        .add("Test.sink.gsink30.context", "test") // filter out only "test"
-        .add("Test.sink.gsink31.context", "test") // filter out only "test"
+        .add("test.sink.gsink30.context", "test") // filter out only "test"
+        .add("test.sink.gsink31.context", "test") // filter out only "test"
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
 
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Tue Jul 15 21:10:24 2014
@@ -88,11 +88,11 @@ public class TestMetricsSystemImpl {
     DefaultMetricsSystem.shutdown();
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
-        .add("Test.sink.test.class", TestSink.class.getName())
-        .add("Test.*.source.filter.exclude", "s0")
-        .add("Test.source.s1.metric.filter.exclude", "X*")
-        .add("Test.sink.sink1.metric.filter.exclude", "Y*")
-        .add("Test.sink.sink2.metric.filter.exclude", "Y*")
+        .add("test.sink.test.class", TestSink.class.getName())
+        .add("test.*.source.filter.exclude", "s0")
+        .add("test.source.s1.metric.filter.exclude", "X*")
+        .add("test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("test.sink.sink2.metric.filter.exclude", "Y*")
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
@@ -130,11 +130,11 @@ public class TestMetricsSystemImpl {
     DefaultMetricsSystem.shutdown(); 
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
-        .add("Test.sink.test.class", TestSink.class.getName())
-        .add("Test.*.source.filter.exclude", "s0")
-        .add("Test.source.s1.metric.filter.exclude", "X*")
-        .add("Test.sink.sink1.metric.filter.exclude", "Y*")
-        .add("Test.sink.sink2.metric.filter.exclude", "Y*")
+        .add("test.sink.test.class", TestSink.class.getName())
+        .add("test.*.source.filter.exclude", "s0")
+        .add("test.source.s1.metric.filter.exclude", "X*")
+        .add("test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("test.sink.sink2.metric.filter.exclude", "Y*")
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
@@ -169,13 +169,14 @@ public class TestMetricsSystemImpl {
   @Test public void testMultiThreadedPublish() throws Exception {
     final int numThreads = 10;
     new ConfigBuilder().add("*.period", 80)
-      .add("Test.sink.Collector." + MetricsConfig.QUEUE_CAPACITY_KEY,
+      .add("test.sink.collector." + MetricsConfig.QUEUE_CAPACITY_KEY,
               numThreads)
       .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     final MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
+
     final CollectingSink sink = new CollectingSink(numThreads);
-    ms.registerSink("Collector",
+    ms.registerSink("collector",
         "Collector of values from all threads.", sink);
     final TestSource[] sources = new TestSource[numThreads];
     final Thread[] threads = new Thread[numThreads];
@@ -280,10 +281,10 @@ public class TestMetricsSystemImpl {
 
   @Test public void testHangingSink() {
     new ConfigBuilder().add("*.period", 8)
-      .add("Test.sink.test.class", TestSink.class.getName())
-      .add("Test.sink.hanging.retry.delay", "1")
-      .add("Test.sink.hanging.retry.backoff", "1.01")
-      .add("Test.sink.hanging.retry.count", "0")
+      .add("test.sink.test.class", TestSink.class.getName())
+      .add("test.sink.hanging.retry.delay", "1")
+      .add("test.sink.hanging.retry.backoff", "1.01")
+      .add("test.sink.hanging.retry.count", "0")
       .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
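
These test keys switch from "Test.*" to "test.*": the metrics2 per-system
configuration subset is matched against the lowercase form of the system
name, so keys capitalized as "Test." no longer select the intended sink
and filter settings for a system created as "Test". A minimal sketch of
that style of lowercase-prefix lookup (illustrative names and values,
not the MetricsConfig internals):

    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;

    public class PrefixLookupSketch {
      public static void main(String[] args) {
        // Properties as they would appear in hadoop-metrics2-test.properties.
        Map<String, String> props = new HashMap<String, String>();
        props.put("test.sink.test.class", "org.example.TestSink");

        // The system is created as "Test", but the per-system subset is
        // keyed by the lowercased name, so normalize before the lookup.
        String prefix = "Test".toLowerCase(Locale.US) + ".";
        System.out.println(props.get(prefix + "sink.test.class"));
        // prints: org.example.TestSink
      }
    }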

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java Tue Jul 15 21:10:24 2014
@@ -19,12 +19,16 @@ package org.apache.hadoop.mount;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.SimpleTcpServer;
 import org.apache.hadoop.oncrpc.SimpleUdpServer;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.util.ShutdownHookManager;
 
+import static org.apache.hadoop.util.ExitUtil.terminate;
+
 /**
  * Main class for starting mountd daemon. This daemon implements the NFS
  * mount protocol. When receiving a MOUNT request from an NFS client, it checks
@@ -33,6 +37,7 @@ import org.apache.hadoop.util.ShutdownHo
  * handle for requested directory and returns it to the client.
  */
 abstract public class MountdBase {
+  public static final Log LOG = LogFactory.getLog(MountdBase.class);
   private final RpcProgram rpcProgram;
   private int udpBoundPort; // Will set after server starts
   private int tcpBoundPort; // Will set after server starts
@@ -40,11 +45,11 @@ abstract public class MountdBase {
   public RpcProgram getRpcProgram() {
     return rpcProgram;
   }
-  
+
   /**
    * Constructor
    * @param program
-   * @throws IOException 
+   * @throws IOException
    */
   public MountdBase(RpcProgram program) throws IOException {
     rpcProgram = program;
@@ -74,11 +79,16 @@ abstract public class MountdBase {
     if (register) {
       ShutdownHookManager.get().addShutdownHook(new Unregister(),
           SHUTDOWN_HOOK_PRIORITY);
-      rpcProgram.register(PortmapMapping.TRANSPORT_UDP, udpBoundPort);
-      rpcProgram.register(PortmapMapping.TRANSPORT_TCP, tcpBoundPort);
+      try {
+        rpcProgram.register(PortmapMapping.TRANSPORT_UDP, udpBoundPort);
+        rpcProgram.register(PortmapMapping.TRANSPORT_TCP, tcpBoundPort);
+      } catch (Throwable e) {
+        LOG.fatal("Failed to start the server. Cause:", e);
+        terminate(1, e);
+      }
     }
   }
-  
+
   /**
    * Priority of the mountd shutdown hook.
    */
@@ -91,5 +101,5 @@ abstract public class MountdBase {
       rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP, tcpBoundPort);
     }
   }
-  
+
 }
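
The substantive change above wraps the two portmap registrations in a
try/catch: a registration failure previously propagated out of start()
unlogged, leaving a half-started daemon, and now it is logged at FATAL
and the process exits through ExitUtil. A condensed sketch of the
pattern (class and method names are illustrative):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import static org.apache.hadoop.util.ExitUtil.terminate;

    public class FailFastSketch {
      private static final Log LOG = LogFactory.getLog(FailFastSketch.class);

      // Run an unrecoverable startup step; on any failure, log once at
      // FATAL and exit through ExitUtil rather than System.exit so that
      // tests can intercept the exit.
      static void startOrDie(Runnable registration) {
        try {
          registration.run();
        } catch (Throwable e) {
          LOG.fatal("Failed to start the server. Cause:", e);
          terminate(1, e);
        }
      }
    }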

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java Tue Jul 15 21:10:24 2014
@@ -71,7 +71,16 @@ public class NfsExports {
   
   private static final Pattern CIDR_FORMAT_LONG = 
       Pattern.compile(SLASH_FORMAT_LONG);
-  
+
+  // Hostnames are composed of a series of 'labels' concatenated with dots.
+  // Labels can be 1 to 63 characters long and may contain only letters,
+  // digits and hyphens; they cannot start or end with a hyphen. For more
+  // details, refer to RFC-1123 and http://en.wikipedia.org/wiki/Hostname
+  private static final String LABEL_FORMAT =
+      "[a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?";
+  private static final Pattern HOSTNAME_FORMAT =
+      Pattern.compile("^(" + LABEL_FORMAT + "\\.)*" + LABEL_FORMAT + "$");
+
   static class AccessCacheEntry implements LightWeightCache.Entry{
     private final String hostAddr;
     private AccessPrivilege access;
@@ -381,10 +390,14 @@ public class NfsExports {
         LOG.debug("Using Regex match for '" + host + "' and " + privilege);
       }
       return new RegexMatch(privilege, host);
+    } else if (HOSTNAME_FORMAT.matcher(host).matches()) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using exact match for '" + host + "' and " + privilege);
+      }
+      return new ExactMatch(privilege, host);
+    } else {
+      throw new IllegalArgumentException("Invalid hostname provided '" + host
+          + "'");
     }
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Using exact match for '" + host + "' and " + privilege);
-    }
-    return new ExactMatch(privilege, host);
   }
-}
\ No newline at end of file
+}
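
The new HOSTNAME_FORMAT pattern tightens export parsing: anything that
is not a wildcard, a regex, or a CIDR range must now look like an
RFC-1123 hostname, otherwise construction fails with an
IllegalArgumentException instead of silently creating an exact-match
entry that can never match. A standalone check of the pattern (the
regex is copied from the patch; the driver is illustrative):

    import java.util.regex.Pattern;

    public class HostnameCheck {
      // Copied from the patch: labels of 1-63 letters/digits/hyphens,
      // not starting or ending with a hyphen, joined by dots.
      private static final String LABEL_FORMAT =
          "[a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?";
      private static final Pattern HOSTNAME_FORMAT =
          Pattern.compile("^(" + LABEL_FORMAT + "\\.)*" + LABEL_FORMAT + "$");

      public static void main(String[] args) {
        String[] hosts = {"host1.example.com", "a-b.c", "foo#bar", "-bad.example"};
        for (String h : hosts) {
          System.out.println(h + " -> " + HOSTNAME_FORMAT.matcher(h).matches());
        }
        // host1.example.com -> true, a-b.c -> true,
        // foo#bar -> false, -bad.example -> false
      }
    }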

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java Tue Jul 15 21:10:24 2014
@@ -25,6 +25,8 @@ import org.apache.hadoop.oncrpc.SimpleTc
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.util.ShutdownHookManager;
 
+import static org.apache.hadoop.util.ExitUtil.terminate;
+
 /**
  * Nfs server. Supports NFS v3 using {@link RpcProgram}.
  * Currently Mountd program is also started inside this class.
@@ -34,7 +36,7 @@ public abstract class Nfs3Base {
   public static final Log LOG = LogFactory.getLog(Nfs3Base.class);
   private final RpcProgram rpcProgram;
   private int nfsBoundPort; // Will set after server starts
-    
+
   public RpcProgram getRpcProgram() {
     return rpcProgram;
   }
@@ -46,11 +48,16 @@ public abstract class Nfs3Base {
 
   public void start(boolean register) {
     startTCPServer(); // Start TCP server
-    
+
     if (register) {
       ShutdownHookManager.get().addShutdownHook(new Unregister(),
           SHUTDOWN_HOOK_PRIORITY);
-      rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort);
+      try {
+        rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort);
+      } catch (Throwable e) {
+        LOG.fatal("Failed to start the server. Cause:", e);
+        terminate(1, e);
+      }
     }
   }
 
@@ -61,7 +68,7 @@ public abstract class Nfs3Base {
     tcpServer.run();
     nfsBoundPort = tcpServer.getBoundPort();
   }
-  
+
   /**
    * Priority of the nfsd shutdown hook.
    */
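
Nfs3Base gets the same fail-fast treatment as MountdBase. The reason
for terminate(1, e) rather than a bare System.exit(1) is testability:
ExitUtil can be told to throw instead of exiting the JVM. A sketch of
that test-side pattern (assuming ExitUtil's usual
disableSystemExit/ExitException behavior; the message is illustrative):

    import org.apache.hadoop.util.ExitUtil;

    public class TerminateInterceptSketch {
      public static void main(String[] args) {
        // With system exit disabled, terminate() throws ExitException
        // instead of killing the JVM, so a test can assert on it.
        ExitUtil.disableSystemExit();
        try {
          ExitUtil.terminate(1, new RuntimeException("portmap unreachable"));
        } catch (ExitUtil.ExitException ee) {
          System.out.println("intercepted exit, status=" + ee.status);
        }
      }
    }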

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java Tue Jul 15 21:10:24 2014
@@ -131,7 +131,7 @@ public abstract class RpcProgram extends
     } catch (IOException e) {
       String request = set ? "Registration" : "Unregistration";
       LOG.error(request + " failure with " + host + ":" + port
-          + ", portmap entry: " + mapEntry, e);
+          + ", portmap entry: " + mapEntry);
       throw new RuntimeException(request + " failure", e);
     }
   }
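
This small change is deliberate: the IOException is still attached as
the cause of the rethrown RuntimeException, so passing it to LOG.error
as well printed the same stack trace twice. The log-and-rethrow
convention, reduced to a sketch (host and message are illustrative):

    import java.io.IOException;

    public class LogAndRethrowSketch {
      static void register() throws IOException {
        throw new IOException("portmap refused connection");
      }

      public static void main(String[] args) {
        try {
          register();
        } catch (IOException e) {
          // Log a short, human-readable message here; keep the cause on
          // the rethrown exception so whoever finally handles it prints
          // the stack trace exactly once.
          System.err.println("Registration failure with localhost:111");
          throw new RuntimeException("Registration failure", e);
        }
      }
    }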

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpClient.java Tue Jul 15 21:10:24 2014
@@ -60,6 +60,7 @@ public class SimpleUdpClient {
       DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length,
           IPAddress, port);
       socket.send(sendPacket);
+      socket.setSoTimeout(500);
       DatagramPacket receivePacket = new DatagramPacket(receiveData,
           receiveData.length);
       socket.receive(receivePacket);
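
Without an SO_TIMEOUT, DatagramSocket.receive() blocks forever when the
reply datagram is lost, which UDP permits, hanging the client inside
the registration path. The new setSoTimeout(500) turns that into a
SocketTimeoutException after half a second. A self-contained
demonstration (no server is listening, so the receive is guaranteed to
time out):

    import java.net.DatagramPacket;
    import java.net.DatagramSocket;
    import java.net.SocketTimeoutException;

    public class UdpTimeoutSketch {
      public static void main(String[] args) throws Exception {
        DatagramSocket socket = new DatagramSocket();
        try {
          socket.setSoTimeout(500); // bounds receive(), has no effect on send()
          byte[] buf = new byte[65536];
          DatagramPacket reply = new DatagramPacket(buf, buf.length);
          socket.receive(reply);    // nothing will ever arrive here
        } catch (SocketTimeoutException e) {
          System.out.println("no reply within 500 ms");
        } finally {
          socket.close();
        }
      }
    }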

Modified: hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java?rev=1610853&r1=1610852&r2=1610853&view=diff
==============================================================================
--- hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java (original)
+++ hadoop/common/branches/fs-encryption/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java Tue Jul 15 21:10:24 2014
@@ -194,4 +194,16 @@ public class TestNfsExports {
     } while ((System.nanoTime() - startNanos) / NanosPerMillis < 5000);
     Assert.assertEquals(AccessPrivilege.NONE, ap);
   }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void testInvalidHost() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "foo#bar");
+  }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void testInvalidSeparator() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "foo ro : bar rw");
+  }
 }