Posted to common-commits@hadoop.apache.org by cm...@apache.org on 2014/08/20 01:50:11 UTC

svn commit: r1619012 [11/14] - in /hadoop/common/branches/HADOOP-10388/hadoop-common-project: ./ hadoop-auth/ hadoop-auth/dev-support/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/main/java/org/apache/hado...

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Tue Aug 19 23:49:39 2014
@@ -17,35 +17,41 @@
  */
 package org.apache.hadoop.crypto.key;
 
-import static org.junit.Assert.*;
-
 import java.io.ByteArrayOutputStream;
 import java.io.File;
+import java.io.IOException;
 import java.io.PrintStream;
 import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 public class TestKeyShell {
   private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
   private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
 
-  private static File tmpDir;
-
   private PrintStream initialStdOut;
   private PrintStream initialStdErr;
 
+  /* The default JCEKS provider - for testing purposes */
+  private String jceksProvider;
+
   @Before
   public void setup() throws Exception {
     outContent.reset();
     errContent.reset();
-    tmpDir = new File(System.getProperty("test.build.data", "target"),
+    final File tmpDir = new File(System.getProperty("test.build.data", "target"),
         UUID.randomUUID().toString());
-    tmpDir.mkdirs();
+    if (!tmpDir.mkdirs()) {
+      throw new IOException("Unable to create " + tmpDir);
+    }
+    jceksProvider = "jceks://file" + tmpDir + "/keystore.jceks";
     initialStdOut = System.out;
     initialStdErr = System.err;
     System.setOut(new PrintStream(outContent));
@@ -58,102 +64,137 @@ public class TestKeyShell {
     System.setErr(initialStdErr);
   }
 
+  /**
+   * Delete a key from the default jceksProvider
+   * @param ks The KeyShell instance
+   * @param keyName The key to delete
+   * @throws Exception
+   */
+  private void deleteKey(KeyShell ks, String keyName) throws Exception {
+    int rc;
+    outContent.reset();
+    final String[] delArgs = {"delete", keyName, "-provider", jceksProvider};
+    rc = ks.run(delArgs);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully deleted."));
+  }
+
+  /**
+   * Lists the keys in the jceksProvider
+   * @param ks The KeyShell instance
+   * @param wantMetadata True if you want metadata returned with the keys
+   * @return The output from the "list" call
+   * @throws Exception
+   */
+  private String listKeys(KeyShell ks, boolean wantMetadata) throws Exception {
+    int rc;
+    outContent.reset();
+    final String[] listArgs = {"list", "-provider", jceksProvider };
+    final String[] listArgsM = {"list", "-metadata", "-provider", jceksProvider };
+    rc = ks.run(wantMetadata ? listArgsM : listArgs);
+    assertEquals(0, rc);
+    return outContent.toString();
+  }
+
   @Test
   public void testKeySuccessfulKeyLifecycle() throws Exception {
-    outContent.reset();
-    String[] args1 = {"create", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
     int rc = 0;
+    String keyName = "key1";
+
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
-    rc = ks.run(args1);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"created."));
 
     outContent.reset();
-    String[] args2 = {"list", "--provider",
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2);
+    final String[] args1 = {"create", keyName, "-provider", jceksProvider};
+    rc = ks.run(args1);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1"));
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully created"));
 
-    outContent.reset();
-    String[] args2a = {"list", "--metadata", "--provider",
-                      "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2a);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1"));
-    assertTrue(outContent.toString().contains("description"));
-    assertTrue(outContent.toString().contains("created"));
+    String listOut = listKeys(ks, false);
+    assertTrue(listOut.contains(keyName));
+
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains(keyName));
+    assertTrue(listOut.contains("description"));
+    assertTrue(listOut.contains("created"));
 
     outContent.reset();
-    String[] args3 = {"roll", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args3);
+    final String[] args2 = {"roll", keyName, "-provider", jceksProvider};
+    rc = ks.run(args2);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("key1 has been successfully " +
     		"rolled."));
 
+    deleteKey(ks, keyName);
+
+    listOut = listKeys(ks, false);
+    assertFalse(listOut, listOut.contains(keyName));
+  }
+  
+  /* HADOOP-10586 KeyShell didn't allow -description. */
+  @Test
+  public void testKeySuccessfulCreationWithDescription() throws Exception {
     outContent.reset();
-    String[] args4 = {"delete", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args4);
+    final String[] args1 = {"create", "key1", "-provider", jceksProvider,
+                      "-description", "someDescription"};
+    int rc = 0;
+    KeyShell ks = new KeyShell();
+    ks.setConf(new Configuration());
+    rc = ks.run(args1);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"deleted."));
+        "created"));
 
-    outContent.reset();
-    String[] args5 = {"list", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args5);
-    assertEquals(0, rc);
-    assertFalse(outContent.toString(), outContent.toString().contains("key1"));
+    String listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("description"));
+    assertTrue(listOut.contains("someDescription"));
   }
-  
+
   @Test
   public void testInvalidKeySize() throws Exception {
-    String[] args1 = {"create", "key1", "--size", "56", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    
+    final String[] args1 = {"create", "key1", "-size", "56", "-provider",
+            jceksProvider};
+
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
-    assertEquals(-1, rc);
-    assertTrue(outContent.toString().contains("key1 has NOT been created."));
+    assertEquals(1, rc);
+    assertTrue(outContent.toString().contains("key1 has not been created."));
   }
 
   @Test
   public void testInvalidCipher() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    
+    final String[] args1 = {"create", "key1", "-cipher", "LJM", "-provider",
+            jceksProvider};
+
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
-    assertEquals(-1, rc);
-    assertTrue(outContent.toString().contains("key1 has NOT been created."));
+    assertEquals(1, rc);
+    assertTrue(outContent.toString().contains("key1 has not been created."));
   }
 
   @Test
   public void testInvalidProvider() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES", "--provider", 
+    final String[] args1 = {"create", "key1", "-cipher", "AES", "-provider",
       "sdff://file/tmp/keystore.jceks"};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("There are no valid " +
     		"KeyProviders configured."));
   }
 
   @Test
   public void testTransientProviderWarning() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES", "--provider", 
+    final String[] args1 = {"create", "key1", "-cipher", "AES", "-provider",
       "user:///"};
     
     int rc = 0;
@@ -167,7 +208,7 @@ public class TestKeyShell {
   
   @Test
   public void testTransientProviderOnlyConfig() throws Exception {
-    String[] args1 = {"create", "key1"};
+    final String[] args1 = {"create", "key1"};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
@@ -175,30 +216,103 @@ public class TestKeyShell {
     config.set(KeyProviderFactory.KEY_PROVIDER_PATH, "user:///");
     ks.setConf(config);
     rc = ks.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("There are no valid " +
     		"KeyProviders configured."));
   }
 
   @Test
   public void testFullCipher() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES/CBC/pkcs5Padding", 
-        "--provider", "jceks://file" + tmpDir + "/keystore.jceks"};
+    final String keyName = "key1";
+    final String[] args1 = {"create", keyName, "-cipher", "AES/CBC/pkcs5Padding",
+        "-provider", jceksProvider};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"created."));
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully created"));
+
+    deleteKey(ks, keyName);
+  }
+
+  @Test
+  public void testAttributes() throws Exception {
+    int rc;
+    KeyShell ks = new KeyShell();
+    ks.setConf(new Configuration());
+
+    /* Simple creation test */
+    final String[] args1 = {"create", "keyattr1", "-provider", jceksProvider,
+            "-attr", "foo=bar"};
+    rc = ks.run(args1);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString().contains("keyattr1 has been " +
+            "successfully created"));
+
+    /* ...and list to see that we have the attr */
+    String listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr1"));
+    assertTrue(listOut.contains("attributes: [foo=bar]"));
+
+    /* Negative tests: no attribute */
+    outContent.reset();
+    final String[] args2 = {"create", "keyattr2", "-provider", jceksProvider,
+            "-attr", "=bar"};
+    rc = ks.run(args2);
+    assertEquals(1, rc);
 
+    /* Not in attribute = value form */
     outContent.reset();
-    String[] args2 = {"delete", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
+    args2[5] = "foo";
+    rc = ks.run(args2);
+    assertEquals(1, rc);
+
+    /* No attribute or value */
+    outContent.reset();
+    args2[5] = "=";
+    rc = ks.run(args2);
+    assertEquals(1, rc);
+
+    /* Legal: attribute is a, value is b=c */
+    outContent.reset();
+    args2[5] = "a=b=c";
     rc = ks.run(args2);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"deleted."));
+
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr2"));
+    assertTrue(listOut.contains("attributes: [a=b=c]"));
+
+    /* Test several attrs together... */
+    outContent.reset();
+    final String[] args3 = {"create", "keyattr3", "-provider", jceksProvider,
+            "-attr", "foo = bar",
+            "-attr", " glarch =baz  ",
+            "-attr", "abc=def"};
+    rc = ks.run(args3);
+    assertEquals(0, rc);
+
+    /* ...and list to ensure they're there. */
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr3"));
+    assertTrue(listOut.contains("[foo=bar]"));
+    assertTrue(listOut.contains("[glarch=baz]"));
+    assertTrue(listOut.contains("[abc=def]"));
+
+    /* Negative test - repeated attributes should fail */
+    outContent.reset();
+    final String[] args4 = {"create", "keyattr4", "-provider", jceksProvider,
+            "-attr", "foo=bar",
+            "-attr", "foo=glarch"};
+    rc = ks.run(args4);
+    assertEquals(1, rc);
+
+    /* Clean up to be a good citizen */
+    deleteKey(ks, "keyattr1");
+    deleteKey(ks, "keyattr2");
+    deleteKey(ks, "keyattr3");
   }
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java Tue Aug 19 23:49:39 2014
@@ -23,7 +23,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.StringTokenizer;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java Tue Aug 19 23:49:39 2014
@@ -21,7 +21,7 @@ package org.apache.hadoop.fs;
 import java.io.*;
 import java.util.ArrayList;
 import java.util.regex.Pattern;
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Shell;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java Tue Aug 19 23:49:39 2014
@@ -29,14 +29,33 @@ import java.util.Random;
 
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
+
 import static org.junit.Assert.*;
 
 public class TestDFVariations {
-
+  private static final String TEST_ROOT_DIR =
+      System.getProperty("test.build.data","build/test/data") + "/TestDFVariations";
+  private static File test_root = null;
+
+  @Before
+  public void setup() throws IOException {
+    test_root = new File(TEST_ROOT_DIR);
+    test_root.mkdirs();
+  }
+  
+  @After
+  public void after() throws IOException {
+    FileUtil.setWritable(test_root, true);
+    FileUtil.fullyDelete(test_root);
+    assertTrue(!test_root.exists());
+  }
+  
   public static class XXDF extends DF {
     public XXDF() throws IOException {
-      super(new File(System.getProperty("test.build.data","/tmp")), 0L);
+      super(test_root, 0L);
     }
 
     @Override

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java Tue Aug 19 23:49:39 2014
@@ -20,7 +20,7 @@ package org.apache.hadoop.fs;
 import java.io.IOException;
 import java.util.Set;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.junit.After;
 import org.junit.Before;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java Tue Aug 19 23:49:39 2014
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.fs;
 
-import static junit.framework.Assert.assertSame;
-import static junit.framework.Assert.assertNotSame;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertNotSame;
 
 import java.io.IOException;
 import java.net.URI;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java Tue Aug 19 23:49:39 2014
@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
@@ -36,6 +37,7 @@ import java.lang.reflect.Modifier;
 import java.util.EnumSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 
 import static org.apache.hadoop.fs.Options.ChecksumOpt;
 import static org.apache.hadoop.fs.Options.CreateOpts;
@@ -138,6 +140,7 @@ public class TestHarFileSystem {
     public int getDefaultPort();
     public String getCanonicalServiceName();
     public Token<?> getDelegationToken(String renewer) throws IOException;
+    public FileChecksum getFileChecksum(Path f) throws IOException;
     public boolean deleteOnExit(Path f) throws IOException;
     public boolean cancelDeleteOnExit(Path f) throws IOException;
     public Token<?>[] addDelegationTokens(String renewer, Credentials creds)
@@ -181,7 +184,26 @@ public class TestHarFileSystem {
 
     public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException;
 
+    public void setXAttr(Path path, String name, byte[] value)
+        throws IOException;
+
+    public void setXAttr(Path path, String name, byte[] value,
+        EnumSet<XAttrSetFlag> flag) throws IOException;
+
+    public byte[] getXAttr(Path path, String name) throws IOException;
+
+    public Map<String, byte[]> getXAttrs(Path path) throws IOException;
+
+    public Map<String, byte[]> getXAttrs(Path path, List<String> names)
+        throws IOException;
+
+    public List<String> listXAttrs(Path path) throws IOException;
+
+    public void removeXAttr(Path path, String name) throws IOException;
+
     public AclStatus getAclStatus(Path path) throws IOException;
+
+    public void access(Path path, FsAction mode) throws IOException;
   }
 
   @Test
@@ -207,10 +229,16 @@ public class TestHarFileSystem {
   }
 
   @Test
-  public void testFileChecksum() {
+  public void testFileChecksum() throws Exception {
     final Path p = new Path("har://file-localhost/foo.har/file1");
     final HarFileSystem harfs = new HarFileSystem();
-    Assert.assertEquals(null, harfs.getFileChecksum(p));
+    try {
+      Assert.assertEquals(null, harfs.getFileChecksum(p));
+    } finally {
+      if (harfs != null) {
+        harfs.close();
+      }
+    }
   }
 
   /**

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java Tue Aug 19 23:49:39 2014
@@ -397,7 +397,7 @@ public class TestHardLink {
     //basic checks on array lengths
     assertEquals(5, win.hardLinkCommand.length); 
     assertEquals(7, win.hardLinkMultPrefix.length);
-    assertEquals(8, win.hardLinkMultSuffix.length);
+    assertEquals(7, win.hardLinkMultSuffix.length);
     assertEquals(4, win.getLinkCountCommand.length);
 
     assertTrue(win.hardLinkMultPrefix[4].equals("%f"));
@@ -406,9 +406,6 @@ public class TestHardLink {
     assertTrue(win.hardLinkMultDir.equals("\\%f"));
     //make sure "\\%f" was munged correctly
     assertEquals(3, ("\\%f").length()); 
-    assertTrue(win.hardLinkMultSuffix[7].equals("1>NUL"));
-    //make sure "1>NUL" was not munged
-    assertEquals(5, ("1>NUL").length()); 
     assertTrue(win.getLinkCountCommand[1].equals("hardlink"));
     //make sure "-c%h" was not munged
     assertEquals(4, ("-c%h").length()); 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java Tue Aug 19 23:49:39 2014
@@ -227,7 +227,7 @@ public class TestLocalFileSystem {
     try {
       fileSys.mkdirs(bad_dir);
       fail("Failed to detect existing file in path");
-    } catch (FileAlreadyExistsException e) { 
+    } catch (ParentNotDirectoryException e) {
       // Expected
     }
     

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java Tue Aug 19 23:49:39 2014
@@ -45,15 +45,15 @@ public class TestStat extends FileSystem
     final String doesNotExist;
     final String directory;
     final String file;
-    final String symlink;
+    final String[] symlinks;
     final String stickydir;
 
     StatOutput(String doesNotExist, String directory, String file,
-        String symlink, String stickydir) {
+        String[] symlinks, String stickydir) {
       this.doesNotExist = doesNotExist;
       this.directory = directory;
       this.file = file;
-      this.symlink = symlink;
+      this.symlinks = symlinks;
       this.stickydir = stickydir;
     }
 
@@ -78,10 +78,12 @@ public class TestStat extends FileSystem
       status = stat.getFileStatusForTesting();
       assertTrue(status.isFile());
 
-      br = new BufferedReader(new StringReader(symlink));
-      stat.parseExecResult(br);
-      status = stat.getFileStatusForTesting();
-      assertTrue(status.isSymlink());
+      for (String symlink : symlinks) {
+        br = new BufferedReader(new StringReader(symlink));
+        stat.parseExecResult(br);
+        status = stat.getFileStatusForTesting();
+        assertTrue(status.isSymlink());
+      }
 
       br = new BufferedReader(new StringReader(stickydir));
       stat.parseExecResult(br);
@@ -93,22 +95,30 @@ public class TestStat extends FileSystem
 
   @Test(timeout=10000)
   public void testStatLinux() throws Exception {
+    String[] symlinks = new String[] {
+        "6,symbolic link,1373584236,1373584236,777,andrew,andrew,`link' -> `target'",
+        "6,symbolic link,1373584236,1373584236,777,andrew,andrew,'link' -> 'target'"
+    };
     StatOutput linux = new StatOutput(
         "stat: cannot stat `watermelon': No such file or directory",
         "4096,directory,1373584236,1373586485,755,andrew,root,`.'",
         "0,regular empty file,1373584228,1373584228,644,andrew,andrew,`target'",
-        "6,symbolic link,1373584236,1373584236,777,andrew,andrew,`link' -> `target'",
+        symlinks,
         "4096,directory,1374622334,1375124212,1755,andrew,andrew,`stickydir'");
     linux.test();
   }
 
   @Test(timeout=10000)
   public void testStatFreeBSD() throws Exception {
+    String[] symlinks = new String[] {
+        "6,Symbolic Link,1373508941,1373508941,120755,awang,awang,`link' -> `target'"
+    };
+    
     StatOutput freebsd = new StatOutput(
         "stat: symtest/link: stat: No such file or directory",
         "512,Directory,1373583695,1373583669,40755,awang,awang,`link' -> `'",
         "0,Regular File,1373508937,1373508937,100644,awang,awang,`link' -> `'",
-        "6,Symbolic Link,1373508941,1373508941,120755,awang,awang,`link' -> `target'",
+        symlinks,
         "512,Directory,1375139537,1375139537,41755,awang,awang,`link' -> `'");
     freebsd.test();
   }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java Tue Aug 19 23:49:39 2014
@@ -84,6 +84,19 @@ public class TestAclCommands {
   }
 
   @Test
+  public void testSetfaclValidationsWithoutPermissions() throws Exception {
+    List<AclEntry> parsedList = new ArrayList<AclEntry>();
+    try {
+      parsedList = AclEntry.parseAclSpec("user:user1:", true);
+    } catch (IllegalArgumentException e) {
+    }
+    assertTrue(parsedList.size() == 0);
+    assertFalse("setfacl should fail with less arguments",
+        0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:",
+            "/path" }));
+  }
+
+  @Test
   public void testMultipleAclSpecParsing() throws Exception {
     List<AclEntry> parsedList = AclEntry.parseAclSpec(
         "group::rwx,user:user1:rwx,user:user2:rw-,"

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java Tue Aug 19 23:49:39 2014
@@ -74,6 +74,8 @@ public class TestCopyPreserveFlag {
     output.close();
     fs.setTimes(FROM, MODIFICATION_TIME, 0);
     fs.setPermission(FROM, PERMISSIONS);
+    fs.setTimes(new Path("d1"), MODIFICATION_TIME, 0);
+    fs.setPermission(new Path("d1"), PERMISSIONS);
   }
 
   @After
@@ -132,4 +134,22 @@ public class TestCopyPreserveFlag {
       run(new Cp(), FROM.toString(), TO.toString());
       assertAttributesChanged();
   }
+
+  @Test(timeout = 10000)
+  public void testDirectoryCpWithP() throws Exception {
+    run(new Cp(), "-p", "d1", "d3");
+    assertEquals(fs.getFileStatus(new Path("d1")).getModificationTime(),
+        fs.getFileStatus(new Path("d3")).getModificationTime());
+    assertEquals(fs.getFileStatus(new Path("d1")).getPermission(),
+        fs.getFileStatus(new Path("d3")).getPermission());
+  }
+
+  @Test(timeout = 10000)
+  public void testDirectoryCpWithoutP() throws Exception {
+    run(new Cp(), "d1", "d4");
+    assertTrue(fs.getFileStatus(new Path("d1")).getModificationTime() !=
+        fs.getFileStatus(new Path("d4")).getModificationTime());
+    assertTrue(!fs.getFileStatus(new Path("d1")).getPermission()
+        .equals(fs.getFileStatus(new Path("d4")).getPermission()));
+  }
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java Tue Aug 19 23:49:39 2014
@@ -35,19 +35,22 @@ import org.junit.Before;
 import org.junit.Test;
 
 public class TestPathData {
+  private static final String TEST_ROOT_DIR = 
+      System.getProperty("test.build.data","build/test/data") + "/testPD";
   protected Configuration conf;
   protected FileSystem fs;
   protected Path testDir;
-
+  
   @Before
   public void initialize() throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testDir = new Path(
-        System.getProperty("test.build.data", "build/test/data") + "/testPD"
-    );
+    testDir = new Path(TEST_ROOT_DIR);
+    
     // don't want scheme on the path, just an absolute path
     testDir = new Path(fs.makeQualified(testDir).toUri().getPath());
+    fs.mkdirs(testDir);
+
     FileSystem.setDefaultUri(conf, fs.getUri());    
     fs.setWorkingDirectory(testDir);
     fs.mkdirs(new Path("d1"));
@@ -60,6 +63,7 @@ public class TestPathData {
 
   @After
   public void cleanup() throws Exception {
+    fs.delete(testDir, true);
     fs.close();
   }
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java Tue Aug 19 23:49:39 2014
@@ -19,11 +19,13 @@
 package org.apache.hadoop.fs.shell;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.ipc.RemoteException;
 import org.junit.Test;
 
 public class TestPathExceptions {
@@ -52,5 +54,25 @@ public class TestPathExceptions {
     assertEquals(new Path(path), pe.getPath());
     assertEquals("`" + path + "': " + error, pe.getMessage());
   }
-  
+
+  @Test
+  public void testRemoteExceptionUnwrap() throws Exception {
+    PathIOException pe;
+    RemoteException re;
+    IOException ie;
+    
+    pe = new PathIOException(path);
+    re = new RemoteException(PathIOException.class.getName(), "test constructor1");
+    ie = re.unwrapRemoteException();
+    assertTrue(ie instanceof PathIOException);
+    ie = re.unwrapRemoteException(PathIOException.class);
+    assertTrue(ie instanceof PathIOException);
+
+    pe = new PathIOException(path, "constructor2");
+    re = new RemoteException(PathIOException.class.getName(), "test constructor2");
+    ie = re.unwrapRemoteException();
+    assertTrue(ie instanceof PathIOException);
+    ie = re.unwrapRemoteException(PathIOException.class);
+    assertTrue(ie instanceof PathIOException);    
+  }
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java Tue Aug 19 23:49:39 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.viewfs;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.List;
 
 
@@ -28,9 +29,16 @@ import org.apache.hadoop.fs.BlockLocatio
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
+import org.apache.hadoop.fs.permission.AclEntry;
+import static org.apache.hadoop.fs.viewfs.Constants.PERMISSION_555;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.AclUtil;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.viewfs.ConfigUtil;
@@ -38,6 +46,7 @@ import org.apache.hadoop.fs.viewfs.ViewF
 import org.apache.hadoop.fs.viewfs.ViewFileSystem.MountPoint;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.junit.After;
 import org.junit.Assert;
@@ -96,7 +105,6 @@ public class ViewFileSystemBaseTest {
     // in the test root
     
     // Set up the defaultMT in the config with our mount point links
-    //Configuration conf = new Configuration();
     conf = ViewFileSystemTestSetup.createConfig();
     setupMountPoints();
     fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
@@ -720,4 +728,79 @@ public class ViewFileSystemBaseTest {
     Assert.assertTrue("Other-readable permission not set!",
         perms.getOtherAction().implies(FsAction.READ));
   }
+
+  /**
+   * Verify the behavior of ACL operations on paths above the root of
+   * any mount table entry.
+   */
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalModifyAclEntries() throws IOException {
+    fsView.modifyAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAclEntries() throws IOException {
+    fsView.removeAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveDefaultAcl() throws IOException {
+    fsView.removeDefaultAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAcl() throws IOException {
+    fsView.removeAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetAcl() throws IOException {
+    fsView.setAcl(new Path("/internalDir"), new ArrayList<AclEntry>());
+  }
+
+  @Test
+  public void testInternalGetAclStatus() throws IOException {
+    final UserGroupInformation currentUser =
+        UserGroupInformation.getCurrentUser();
+    AclStatus aclStatus = fsView.getAclStatus(new Path("/internalDir"));
+    assertEquals(aclStatus.getOwner(), currentUser.getUserName());
+    assertEquals(aclStatus.getGroup(), currentUser.getGroupNames()[0]);
+    assertEquals(aclStatus.getEntries(),
+        AclUtil.getMinimalAcl(PERMISSION_555));
+    assertFalse(aclStatus.isStickyBit());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetXAttr() throws IOException {
+    fsView.setXAttr(new Path("/internalDir"), "xattrName", null);
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttr() throws IOException {
+    fsView.getXAttr(new Path("/internalDir"), "xattrName");
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrs() throws IOException {
+    fsView.getXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrsWithNames() throws IOException {
+    fsView.getXAttrs(new Path("/internalDir"), new ArrayList<String>());
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalListXAttr() throws IOException {
+    fsView.listXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveXAttr() throws IOException {
+    fsView.removeXAttr(new Path("/internalDir"), "xattrName");
+  }
+
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java Tue Aug 19 23:49:39 2014
@@ -22,10 +22,14 @@ import static org.apache.hadoop.fs.FileC
 import static org.apache.hadoop.fs.FileContextTestHelper.exists;
 import static org.apache.hadoop.fs.FileContextTestHelper.isDir;
 import static org.apache.hadoop.fs.FileContextTestHelper.isFile;
+import static org.apache.hadoop.fs.viewfs.Constants.PERMISSION_555;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -39,8 +43,12 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.AclUtil;
 import org.apache.hadoop.fs.viewfs.ViewFs.MountPoint;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.junit.After;
 import org.junit.Assert;
@@ -695,4 +703,78 @@ public class ViewFsBaseTest {
   public void testInternalSetOwner() throws IOException {
     fcView.setOwner(new Path("/internalDir"), "foo", "bar");
   }
+
+  /**
+   * Verify the behavior of ACL operations on paths above the root of
+   * any mount table entry.
+   */
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalModifyAclEntries() throws IOException {
+    fcView.modifyAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAclEntries() throws IOException {
+    fcView.removeAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveDefaultAcl() throws IOException {
+    fcView.removeDefaultAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAcl() throws IOException {
+    fcView.removeAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetAcl() throws IOException {
+    fcView.setAcl(new Path("/internalDir"), new ArrayList<AclEntry>());
+  }
+
+  @Test
+  public void testInternalGetAclStatus() throws IOException {
+    final UserGroupInformation currentUser =
+        UserGroupInformation.getCurrentUser();
+    AclStatus aclStatus = fcView.getAclStatus(new Path("/internalDir"));
+    assertEquals(aclStatus.getOwner(), currentUser.getUserName());
+    assertEquals(aclStatus.getGroup(), currentUser.getGroupNames()[0]);
+    assertEquals(aclStatus.getEntries(),
+        AclUtil.getMinimalAcl(PERMISSION_555));
+    assertFalse(aclStatus.isStickyBit());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetXAttr() throws IOException {
+    fcView.setXAttr(new Path("/internalDir"), "xattrName", null);
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttr() throws IOException {
+    fcView.getXAttr(new Path("/internalDir"), "xattrName");
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrs() throws IOException {
+    fcView.getXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrsWithNames() throws IOException {
+    fcView.getXAttrs(new Path("/internalDir"), new ArrayList<String>());
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalListXAttr() throws IOException {
+    fcView.listXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveXAttr() throws IOException {
+    fcView.removeXAttr(new Path("/internalDir"), "xattrName");
+  }
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java Tue Aug 19 23:49:39 2014
@@ -39,6 +39,7 @@ import org.junit.Assert;
 import org.mockito.Mockito;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveNotFoundException;
 import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
@@ -59,8 +60,9 @@ public class TestActiveStandbyElector {
     ActiveStandbyElectorTester(String hostPort, int timeout, String parent,
         List<ACL> acl, ActiveStandbyElectorCallback app) throws IOException,
         KeeperException {
-      super(hostPort, timeout, parent, acl,
-          Collections.<ZKAuthInfo>emptyList(), app);
+      super(hostPort, timeout, parent, acl, Collections
+          .<ZKAuthInfo> emptyList(), app,
+          CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT);
     }
 
     @Override
@@ -715,7 +717,8 @@ public class TestActiveStandbyElector {
   public void testWithoutZKServer() throws Exception {
     try {
       new ActiveStandbyElector("127.0.0.1", 2000, ZK_PARENT_NAME,
-          Ids.OPEN_ACL_UNSAFE, Collections.<ZKAuthInfo> emptyList(), mockApp);
+          Ids.OPEN_ACL_UNSAFE, Collections.<ZKAuthInfo> emptyList(), mockApp,
+          CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT);
       Assert.fail("Did not throw zookeeper connection loss exceptions!");
     } catch (KeeperException ke) {
       GenericTestUtils.assertExceptionContains( "ConnectionLoss", ke);

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java Tue Aug 19 23:49:39 2014
@@ -20,16 +20,15 @@ package org.apache.hadoop.ha;
 
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.util.Collections;
 import java.util.UUID;
 
 import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.State;
 import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
-import org.apache.hadoop.util.Shell;
 import org.apache.log4j.Level;
 import org.apache.zookeeper.ZooDefs.Ids;
 import org.apache.zookeeper.server.ZooKeeperServer;
@@ -61,8 +60,6 @@ public class TestActiveStandbyElectorRea
   
   @Override
   public void setUp() throws Exception {
-    // skip tests on Windows until after resolution of ZooKeeper client bug
-    assumeTrue(!Shell.WINDOWS);
     super.setUp();
     
     zkServer = getServer(serverFactory);
@@ -70,9 +67,9 @@ public class TestActiveStandbyElectorRea
     for (int i = 0; i < NUM_ELECTORS; i++) {
       cbs[i] =  Mockito.mock(ActiveStandbyElectorCallback.class);
       appDatas[i] = Ints.toByteArray(i);
-      electors[i] = new ActiveStandbyElector(
-          hostPort, 5000, PARENT_DIR, Ids.OPEN_ACL_UNSAFE,
-          Collections.<ZKAuthInfo>emptyList(), cbs[i]);
+      electors[i] = new ActiveStandbyElector(hostPort, 5000, PARENT_DIR,
+          Ids.OPEN_ACL_UNSAFE, Collections.<ZKAuthInfo> emptyList(), cbs[i],
+          CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT);
     }
   }
   

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java Tue Aug 19 23:49:39 2014
@@ -18,7 +18,6 @@
 package org.apache.hadoop.ha;
 
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 import java.security.NoSuchAlgorithmException;
 
@@ -29,7 +28,6 @@ import org.apache.hadoop.ha.HAServicePro
 import org.apache.hadoop.ha.HealthMonitor.State;
 import org.apache.hadoop.ha.MiniZKFCCluster.DummyZKFC;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.Level;
 import org.apache.zookeeper.KeeperException;
@@ -68,8 +66,6 @@ public class TestZKFailoverController ex
   
   @Before
   public void setupConfAndServices() {
-    // skip tests on Windows until after resolution of ZooKeeper client bug
-    assumeTrue(!Shell.WINDOWS);
     conf = new Configuration();
     conf.set(ZKFailoverController.ZK_ACL_KEY, TEST_ACL);
     conf.set(ZKFailoverController.ZK_AUTH_KEY, TEST_AUTH_GOOD);
@@ -232,6 +228,27 @@ public class TestZKFailoverController ex
       cluster.stop();
     }
   }
+
+  /**
+   * Test that, when the health monitor indicates bad health status,
+   * failover is triggered. Also ensures that graceful active->standby
+   * transition is used when possible, falling back to fencing when
+   * the graceful approach fails.
+   */
+  @Test(timeout=15000)
+  public void testAutoFailoverOnBadState() throws Exception {
+    try {
+      cluster.start();
+      DummyHAService svc0 = cluster.getService(0);
+      LOG.info("Faking svc0 to change the state, should failover to svc1");
+      svc0.state = HAServiceState.STANDBY;
+      
+      // Should fail back to svc0 at this point
+      cluster.waitForHAState(1, HAServiceState.ACTIVE);
+    } finally {
+      cluster.stop();
+    }
+  }
   
   @Test(timeout=15000)
   public void testAutoFailoverOnLostZKSession() throws Exception {

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java Tue Aug 19 23:49:39 2014
@@ -17,13 +17,11 @@
  */
 package org.apache.hadoop.ha;
 
-import static org.junit.Assume.assumeTrue;
-
 import java.util.Random;
 
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.util.Time;
 import org.junit.After;
 import org.junit.Before;
@@ -48,8 +46,6 @@ public class TestZKFailoverControllerStr
 
   @Before
   public void setupConfAndServices() throws Exception {
-    // skip tests on Windows until after resolution of ZooKeeper client bug
-    assumeTrue(!Shell.WINDOWS);
     conf = new Configuration();
     conf.set(ZKFailoverController.ZK_QUORUM_KEY, hostPort);
     this.cluster = new MiniZKFCCluster(conf, getServer(serverFactory));
@@ -126,8 +122,7 @@ public class TestZKFailoverControllerStr
         .when(cluster.getService(0).proxy).monitorHealth();
     Mockito.doAnswer(new RandomlyThrow(1))
         .when(cluster.getService(1).proxy).monitorHealth();
-    ActiveStandbyElector.NUM_RETRIES = 100;
-    
+    conf.setInt(CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_KEY, 100);
     // Don't start until after the above mocking. Otherwise we can get
     // Mockito errors if the HM calls the proxy in the middle of
     // setting up the mock.

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java Tue Aug 19 23:49:39 2014
@@ -13,7 +13,7 @@
  */
 package org.apache.hadoop.http;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.net.NetUtils;
@@ -36,6 +36,8 @@ import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
 import java.security.GeneralSecurityException;
+import java.net.HttpCookie;
+import java.util.List;
 
 public class TestHttpCookieFlag {
   private static final String BASEDIR = System.getProperty("test.build.dir",
@@ -116,8 +118,12 @@ public class TestHttpCookieFlag {
             .getConnectorAddress(0)));
     HttpURLConnection conn = (HttpURLConnection) new URL(base,
             "/echo").openConnection();
-    Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE + "=token; " +
-            "HttpOnly", conn.getHeaderField("Set-Cookie"));
+
+    String header = conn.getHeaderField("Set-Cookie");
+    List<HttpCookie> cookies = HttpCookie.parse(header);
+    Assert.assertTrue(!cookies.isEmpty());
+    Assert.assertTrue(header.contains("; HttpOnly"));
+    Assert.assertTrue("token".equals(cookies.get(0).getValue()));
   }
 
   @Test
@@ -127,8 +133,13 @@ public class TestHttpCookieFlag {
     HttpsURLConnection conn = (HttpsURLConnection) new URL(base,
             "/echo").openConnection();
     conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
-    Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE + "=token; " +
-            "Secure; HttpOnly", conn.getHeaderField("Set-Cookie"));
+
+    String header = conn.getHeaderField("Set-Cookie");
+    List<HttpCookie> cookies = HttpCookie.parse(header);
+    Assert.assertTrue(!cookies.isEmpty());
+    Assert.assertTrue(header.contains("; HttpOnly"));
+    Assert.assertTrue(cookies.get(0).getSecure());
+    Assert.assertTrue("token".equals(cookies.get(0).getValue()));
   }
 
   @AfterClass
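
The updated assertions no longer compare the raw Set-Cookie string; they parse it with java.net.HttpCookie and inspect individual attributes. A self-contained sketch of that parsing approach; the header literal below is a made-up example, whereas the test reads it from conn.getHeaderField("Set-Cookie"):

import java.net.HttpCookie;
import java.util.List;

public class SetCookieParseSketch {
  public static void main(String[] args) {
    String header = "hadoop.auth=token; Secure; HttpOnly"; // illustrative value
    List<HttpCookie> cookies = HttpCookie.parse(header);
    HttpCookie auth = cookies.get(0);
    System.out.println(auth.getName());    // hadoop.auth
    System.out.println(auth.getValue());   // token
    System.out.println(auth.getSecure());  // true
    // As in the test, the HttpOnly attribute is checked on the raw header.
    System.out.println(header.contains("; HttpOnly")); // true
  }
}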

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java Tue Aug 19 23:49:39 2014
@@ -45,7 +45,7 @@ import javax.servlet.http.HttpServletReq
 import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -259,13 +259,6 @@ public class TestHttpServer extends Http
     conn.connect();
     assertEquals(200, conn.getResponseCode());
     assertEquals("text/html; charset=utf-8", conn.getContentType());
-
-    // JSPs should default to text/html with utf8
-    servletUrl = new URL(baseUrl, "/testjsp.jsp");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/html; charset=utf-8", conn.getContentType());
   }
 
   /**
@@ -421,7 +414,7 @@ public class TestHttpServer extends Http
         assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
             + servlet, user));
       }
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
+      assertEquals(HttpURLConnection.HTTP_FORBIDDEN, getHttpStatusCode(
           serverURL + servlet, "userE"));
     }
     myServer.stop();
@@ -481,7 +474,7 @@ public class TestHttpServer extends Http
     response = Mockito.mock(HttpServletResponse.class);
     conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
     Assert.assertFalse(HttpServer2.hasAdministratorAccess(context, request, response));
-    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString());
 
     //authorization ON & user NOT NULL & ACLs NULL
     response = Mockito.mock(HttpServletResponse.class);
@@ -494,7 +487,7 @@ public class TestHttpServer extends Http
     Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
     Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls);
     Assert.assertFalse(HttpServer2.hasAdministratorAccess(context, request, response));
-    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString());
 
     //authorization ON & user NOT NULL & ACLs NOT NULL & user in ACLs
     response = Mockito.mock(HttpServletResponse.class);

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java Tue Aug 19 23:49:39 2014
@@ -24,6 +24,7 @@ import java.nio.BufferUnderflowException
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.util.Random;
+import com.google.common.base.Charsets;
 import com.google.common.primitives.Bytes;
 
 /** Unit tests for LargeUTF8. */
@@ -363,6 +364,27 @@ public class TestText extends TestCase {
       fail("testReadWriteOperations error !!!");
     }        
   }
+
+  public void testReadWithKnownLength() throws IOException {
+    String line = "hello world";
+    byte[] inputBytes = line.getBytes(Charsets.UTF_8);
+    DataInputBuffer in = new DataInputBuffer();
+    Text text = new Text();
+
+    in.reset(inputBytes, inputBytes.length);
+    text.readWithKnownLength(in, 5);
+    assertEquals("hello", text.toString());
+
+    // Read longer length, make sure it lengthens
+    in.reset(inputBytes, inputBytes.length);
+    text.readWithKnownLength(in, 7);
+    assertEquals("hello w", text.toString());
+
+    // Read shorter length, make sure it shortens
+    in.reset(inputBytes, inputBytes.length);
+    text.readWithKnownLength(in, 2);
+    assertEquals("he", text.toString());
+  }
   
   /**
    * test {@code Text.bytesToCodePoint(bytes) } 
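
The new testReadWithKnownLength exercises Text.readWithKnownLength, which reads a caller-supplied number of bytes instead of the varint length prefix that readFields expects. A short sketch of the same call outside the test harness, assuming only the signature used above:

import java.io.IOException;
import com.google.common.base.Charsets;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.Text;

public class ReadWithKnownLengthSketch {
  public static void main(String[] args) throws IOException {
    byte[] bytes = "hello world".getBytes(Charsets.UTF_8);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(bytes, bytes.length);

    Text text = new Text();
    text.readWithKnownLength(in, 5); // the length comes from the caller
    System.out.println(text);        // hello
  }
}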

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java Tue Aug 19 23:49:39 2014
@@ -23,6 +23,7 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Random;
 
+import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -30,6 +31,11 @@ import junit.framework.TestCase;
 
 /** Unit tests for Writable. */
 public class TestWritable extends TestCase {
+  private static final String TEST_CONFIG_PARAM = "frob.test";
+  private static final String TEST_CONFIG_VALUE = "test";
+  private static final String TEST_WRITABLE_CONFIG_PARAM = "test.writable";
+  private static final String TEST_WRITABLE_CONFIG_VALUE = TEST_CONFIG_VALUE;
+
   public TestWritable(String name) { super(name); }
 
   /** Example class used in test cases below. */
@@ -64,6 +70,25 @@ public class TestWritable extends TestCa
     }
   }
 
+  public static class SimpleWritableComparable extends SimpleWritable
+      implements WritableComparable<SimpleWritableComparable>, Configurable {
+    private Configuration conf;
+
+    public SimpleWritableComparable() {}
+
+    public void setConf(Configuration conf) {
+      this.conf = conf;
+    }
+
+    public Configuration getConf() {
+      return this.conf;
+    }
+
+    public int compareTo(SimpleWritableComparable o) {
+      return this.state - o.state;
+    }
+  }
+
   /** Test 1: Check that SimpleWritable. */
   public void testSimpleWritable() throws Exception {
     testWritable(new SimpleWritable());
@@ -121,9 +146,34 @@ public class TestWritable extends TestCa
     @Override public int compareTo(Frob o) { return 0; }
   }
 
-  /** Test that comparator is defined. */
+  /** Test that comparator is defined and configured. */
   public static void testGetComparator() throws Exception {
-    assert(WritableComparator.get(Frob.class) instanceof FrobComparator);
+    Configuration conf = new Configuration();
+
+    // Without conf.
+    WritableComparator frobComparator = WritableComparator.get(Frob.class);
+    assert(frobComparator instanceof FrobComparator);
+    assertNotNull(frobComparator.getConf());
+    assertNull(frobComparator.getConf().get(TEST_CONFIG_PARAM));
+
+    // With conf.
+    conf.set(TEST_CONFIG_PARAM, TEST_CONFIG_VALUE);
+    frobComparator = WritableComparator.get(Frob.class, conf);
+    assert(frobComparator instanceof FrobComparator);
+    assertNotNull(frobComparator.getConf());
+    assertEquals(conf.get(TEST_CONFIG_PARAM), TEST_CONFIG_VALUE);
+
+    // Without conf: should reuse the previous configuration.
+    frobComparator = WritableComparator.get(Frob.class);
+    assert(frobComparator instanceof FrobComparator);
+    assertNotNull(frobComparator.getConf());
+    assertEquals(conf.get(TEST_CONFIG_PARAM), TEST_CONFIG_VALUE);
+
+    // New conf: should use the new configuration.
+    frobComparator = WritableComparator.get(Frob.class, new Configuration());
+    assert(frobComparator instanceof FrobComparator);
+    assertNotNull(frobComparator.getConf());
+    assertNull(frobComparator.getConf().get(TEST_CONFIG_PARAM));
   }
 
   /**
@@ -153,4 +203,17 @@ public class TestWritable extends TestCa
         .compare(writable1, writable3) == 0);
   }
 
+  /**
+   * Test that Writables are configured by the comparator.
+   */
+  public void testConfigurableWritableComparator() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(TEST_WRITABLE_CONFIG_PARAM, TEST_WRITABLE_CONFIG_VALUE);
+
+    WritableComparator wc = WritableComparator.get(SimpleWritableComparable.class, conf);
+    SimpleWritableComparable key = ((SimpleWritableComparable)wc.newKey());
+    assertNotNull(wc.getConf());
+    assertNotNull(key.getConf());
+    assertEquals(key.getConf().get(TEST_WRITABLE_CONFIG_PARAM), TEST_WRITABLE_CONFIG_VALUE);
+  }
 }
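
The expanded testGetComparator relies on a WritableComparator.get overload that takes a Configuration and on the comparator exposing getConf(). A compact sketch of that lookup, restricted to calls that appear in the test above; Text is used here only as a convenient registered WritableComparable:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

public class ComparatorConfSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("frob.test", "test"); // illustrative key/value, mirroring the test

    // Resolve the registered comparator with an explicit Configuration.
    WritableComparator wc = WritableComparator.get(Text.class, conf);
    System.out.println(wc.getConf() != null); // true: the comparator is configured
  }
}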

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java Tue Aug 19 23:49:39 2014
@@ -23,7 +23,7 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.util.Random;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java Tue Aug 19 23:49:39 2014
@@ -19,7 +19,7 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java Tue Aug 19 23:49:39 2014
@@ -19,7 +19,7 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 import java.util.Random;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java Tue Aug 19 23:49:39 2014
@@ -22,7 +22,7 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.util.Random;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java Tue Aug 19 23:49:39 2014
@@ -19,7 +19,7 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java Tue Aug 19 23:49:39 2014
@@ -21,7 +21,7 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 import java.util.Random;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Tue Aug 19 23:49:39 2014
@@ -49,7 +49,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 
 public class TestNativeIO {
@@ -572,7 +571,6 @@ public class TestNativeIO {
   @Test(timeout=10000)
   public void testMlock() throws Exception {
     assumeTrue(NativeIO.isAvailable());
-    assumeTrue(Shell.LINUX);
     final File TEST_FILE = new File(new File(
         System.getProperty("test.build.data","build/test/data")),
         "testMlockFile");
@@ -607,8 +605,8 @@ public class TestNativeIO {
         sum += mapbuf.get(i);
       }
       assertEquals("Expected sums to be equal", bufSum, sum);
-      // munlock the buffer
-      NativeIO.POSIX.munlock(mapbuf, fileSize);
+      // munmap the buffer, which also implicitly unlocks it
+      NativeIO.POSIX.munmap(mapbuf);
     } finally {
       if (channel != null) {
         channel.close();
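
The mlock test now tears down with NativeIO.POSIX.munmap instead of an explicit munlock, since unmapping the pages also releases the lock. A rough sketch of that cleanup around a mapped file, assuming only the munmap call used above; the surrounding mapping code is ordinary java.nio:

import java.io.File;
import java.io.FileInputStream;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import org.apache.hadoop.io.nativeio.NativeIO;

public class MunmapSketch {
  public static void readAndUnmap(File file) throws Exception {
    FileInputStream fis = new FileInputStream(file);
    FileChannel channel = fis.getChannel();
    try {
      MappedByteBuffer buf =
          channel.map(FileChannel.MapMode.READ_ONLY, 0, file.length());
      // ... read from buf ...
      // Unmapping also implicitly drops any mlock held on these pages.
      NativeIO.POSIX.munmap(buf);
    } finally {
      channel.close();
      fis.close();
    }
  }
}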

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java Tue Aug 19 23:49:39 2014
@@ -26,27 +26,37 @@ import static org.apache.hadoop.io.retry
 import static org.apache.hadoop.io.retry.RetryPolicies.retryUpToMaximumCountWithProportionalSleep;
 import static org.apache.hadoop.io.retry.RetryPolicies.retryUpToMaximumTimeWithFixedSleep;
 import static org.apache.hadoop.io.retry.RetryPolicies.exponentialBackoffRetry;
+import static org.junit.Assert.*;
 
 import java.util.Collections;
 import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
-
-import junit.framework.TestCase;
+import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.hadoop.io.retry.UnreliableInterface.FatalException;
 import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RemoteException;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.lang.reflect.UndeclaredThrowableException;
 
-public class TestRetryProxy extends TestCase {
+public class TestRetryProxy {
   
   private UnreliableImplementation unreliableImpl;
   
-  @Override
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     unreliableImpl = new UnreliableImplementation();
   }
 
+  @Test
   public void testTryOnceThenFail() throws UnreliableException {
     UnreliableInterface unreliable = (UnreliableInterface)
       RetryProxy.create(UnreliableInterface.class, unreliableImpl, TRY_ONCE_THEN_FAIL);
@@ -62,6 +72,7 @@ public class TestRetryProxy extends Test
   /**
    * Test for {@link RetryInvocationHandler#isRpcInvocation(Object)}
    */
+  @Test
   public void testRpcInvocation() throws Exception {
     // For a proxy method should return true
     final UnreliableInterface unreliable = (UnreliableInterface)
@@ -91,6 +102,7 @@ public class TestRetryProxy extends Test
     assertFalse(RetryInvocationHandler.isRpcInvocation(new Object()));
   }
   
+  @Test
   public void testRetryForever() throws UnreliableException {
     UnreliableInterface unreliable = (UnreliableInterface)
       RetryProxy.create(UnreliableInterface.class, unreliableImpl, RETRY_FOREVER);
@@ -99,6 +111,7 @@ public class TestRetryProxy extends Test
     unreliable.failsTenTimesThenSucceeds();
   }
   
+  @Test
   public void testRetryUpToMaximumCountWithFixedSleep() throws UnreliableException {
     UnreliableInterface unreliable = (UnreliableInterface)
       RetryProxy.create(UnreliableInterface.class, unreliableImpl,
@@ -113,6 +126,7 @@ public class TestRetryProxy extends Test
     }
   }
   
+  @Test
   public void testRetryUpToMaximumTimeWithFixedSleep() throws UnreliableException {
     UnreliableInterface unreliable = (UnreliableInterface)
       RetryProxy.create(UnreliableInterface.class, unreliableImpl,
@@ -127,6 +141,7 @@ public class TestRetryProxy extends Test
     }
   }
   
+  @Test
   public void testRetryUpToMaximumCountWithProportionalSleep() throws UnreliableException {
     UnreliableInterface unreliable = (UnreliableInterface)
       RetryProxy.create(UnreliableInterface.class, unreliableImpl,
@@ -141,6 +156,7 @@ public class TestRetryProxy extends Test
     }
   }
   
+  @Test
   public void testExponentialRetry() throws UnreliableException {
     UnreliableInterface unreliable = (UnreliableInterface)
       RetryProxy.create(UnreliableInterface.class, unreliableImpl,
@@ -155,6 +171,7 @@ public class TestRetryProxy extends Test
     }
   }
   
+  @Test
   public void testRetryByException() throws UnreliableException {
     Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
       Collections.<Class<? extends Exception>, RetryPolicy>singletonMap(FatalException.class, TRY_ONCE_THEN_FAIL);
@@ -171,6 +188,7 @@ public class TestRetryProxy extends Test
     }
   }
   
+  @Test
   public void testRetryByRemoteException() {
     Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
       Collections.<Class<? extends Exception>, RetryPolicy>singletonMap(FatalException.class, TRY_ONCE_THEN_FAIL);
@@ -186,4 +204,35 @@ public class TestRetryProxy extends Test
     }
   }  
   
+  @Test
+  public void testRetryInterruptible() throws Throwable {
+    final UnreliableInterface unreliable = (UnreliableInterface)
+        RetryProxy.create(UnreliableInterface.class, unreliableImpl,
+            retryUpToMaximumTimeWithFixedSleep(10, 10, TimeUnit.SECONDS));
+    
+    final CountDownLatch latch = new CountDownLatch(1);
+    final AtomicReference<Thread> futureThread = new AtomicReference<Thread>();
+    ExecutorService exec = Executors.newSingleThreadExecutor();
+    Future<Throwable> future = exec.submit(new Callable<Throwable>(){
+      @Override
+      public Throwable call() throws Exception {
+        futureThread.set(Thread.currentThread());
+        latch.countDown();
+        try {
+          unreliable.alwaysFailsWithFatalException();
+        } catch (UndeclaredThrowableException ute) {
+          return ute.getCause();
+        }
+        return null;
+      }
+    });
+    latch.await();
+    Thread.sleep(1000); // time to fail and sleep
+    assertTrue(futureThread.get().isAlive());
+    futureThread.get().interrupt();
+    Throwable e = future.get(1, TimeUnit.SECONDS); // should return immediately 
+    assertNotNull(e);
+    assertEquals(InterruptedException.class, e.getClass());
+    assertEquals("sleep interrupted", e.getMessage());
+  }
 }
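
The new testRetryInterruptible drives a retry proxy from another thread and then interrupts it. A trimmed-down sketch of the proxy construction it relies on, with a hypothetical FlakyService interface standing in for the UnreliableInterface test fixture:

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryProxy;

public class RetryProxySketch {
  /** Hypothetical service interface used only for illustration. */
  public interface FlakyService {
    String fetch() throws Exception;
  }

  public static FlakyService wrap(FlakyService raw) {
    // Same policy as testRetryInterruptible: retry for up to 10 seconds,
    // sleeping 10 seconds between attempts.
    return (FlakyService) RetryProxy.create(FlakyService.class, raw,
        RetryPolicies.retryUpToMaximumTimeWithFixedSleep(10, 10,
            TimeUnit.SECONDS));
  }
}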

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java Tue Aug 19 23:49:39 2014
@@ -26,7 +26,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Enumeration;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +35,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -326,8 +327,8 @@ public class MiniRPCBenchmark {
     String shortUserName =
       UserGroupInformation.createRemoteUser(user).getShortUserName();
     try {
-      conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(shortUserName),
-          GROUP_NAME_1);
+      conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+              getProxySuperuserGroupConfKey(shortUserName), GROUP_NAME_1);
       configureSuperUserIPAddresses(conf, shortUserName);
       // start the server
       miniServer = new MiniServer(conf, user, keytabFile);
@@ -410,7 +411,7 @@ public class MiniRPCBenchmark {
     }
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName),
-        builder.toString());
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(superUserShortName), builder.toString());
   }
 }
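
The proxy-user configuration keys are now obtained from DefaultImpersonationProvider.getTestProvider() rather than from static helpers on ProxyUsers. A small sketch of building such a test Configuration with the accessors shown in the hunks above; the user, group, and address values are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;

public class ProxyUserConfSketch {
  public static Configuration forSuperUser(String shortUserName) {
    Configuration conf = new Configuration();
    DefaultImpersonationProvider provider =
        DefaultImpersonationProvider.getTestProvider();
    // Allow the given super-user to impersonate members of a placeholder
    // group when connecting from a placeholder address.
    conf.setStrings(provider.getProxySuperuserGroupConfKey(shortUserName),
        "group1");
    conf.setStrings(provider.getProxySuperuserIpConfKey(shortUserName),
        "127.0.0.1");
    return conf;
  }
}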

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java Tue Aug 19 23:49:39 2014
@@ -496,6 +496,8 @@ public class TestRPC {
       caught = true;
     }
     assertTrue(caught);
+    rb = getMetrics(server.rpcDetailedMetrics.name());
+    assertCounter("IOExceptionNumOps", 1L, rb);
 
     proxy.testServerGet();
 
@@ -581,14 +583,14 @@ public class TestRPC {
       }
       MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
       if (expectFailure) {
-        assertCounter("RpcAuthorizationFailures", 1, rb);
+        assertCounter("RpcAuthorizationFailures", 1L, rb);
       } else {
-        assertCounter("RpcAuthorizationSuccesses", 1, rb);
+        assertCounter("RpcAuthorizationSuccesses", 1L, rb);
       }
       //since we don't have authentication turned ON, we should see 
       // 0 for the authentication successes and 0 for failure
-      assertCounter("RpcAuthenticationFailures", 0, rb);
-      assertCounter("RpcAuthenticationSuccesses", 0, rb);
+      assertCounter("RpcAuthenticationFailures", 0L, rb);
+      assertCounter("RpcAuthenticationSuccesses", 0L, rb);
     }
   }
   

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java Tue Aug 19 23:49:39 2014
@@ -26,7 +26,7 @@ import java.io.IOException;
 import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -390,4 +390,4 @@ System.out.println("echo int is NOT supp
     builder.setRpcKind(rpcKind.toString());
     return builder.build();
   }
-}
\ No newline at end of file
+}

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java Tue Aug 19 23:49:39 2014
@@ -24,7 +24,7 @@ import java.util.Map;
 
 import javax.net.SocketFactory;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;