Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/10/19 04:27:38 UTC

svn commit: r1399950 [15/17] - in /hadoop/common/branches/HDFS-2802/hadoop-common-project: hadoop-annotations/ hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ hadoop-auth-examples/ hadoop-auth/ hadoop-auth/src/main/java/org/apa...

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Fri Oct 19 02:25:55 2012
@@ -61,7 +61,7 @@ public class TestPath extends TestCase {
     assertEquals(pathString, new Path(pathString).toString());
   }
 
-  public void testNormalize() {
+  public void testNormalize() throws URISyntaxException {
     assertEquals("", new Path(".").toString());
     assertEquals("..", new Path("..").toString());
     assertEquals("/", new Path("/").toString());
@@ -75,6 +75,8 @@ public class TestPath extends TestCase {
     assertEquals("foo", new Path("foo/").toString());
     assertEquals("foo", new Path("foo//").toString());
     assertEquals("foo/bar", new Path("foo//bar").toString());
+    assertEquals("hdfs://foo/foo2/bar/baz/",
+        new Path(new URI("hdfs://foo//foo2///bar/baz///")).toString());
     if (Path.WINDOWS) {
       assertEquals("c:/a/b", new Path("c:\\a\\b").toString());
     }
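
The new assertion above exercises slash collapsing in the path component of a
URI-constructed Path: runs of '/' collapse to a single separator while the
scheme, authority, and trailing slash survive. A minimal sketch of that
collapsing rule, assuming a plain regex replacement rather than Path's actual
normalization code:

    // Illustrative only -- not org.apache.hadoop.fs.Path's implementation.
    public final class SlashCollapseDemo {
      static String collapseSlashes(String path) {
        // Reduce every run of '/' to a single separator.
        return path.replaceAll("/+", "/");
      }
      public static void main(String[] args) {
        // Prints "/foo2/bar/baz/", matching the path component asserted above.
        System.out.println(collapseSlashes("//foo2///bar/baz///"));
      }
    }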

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java Fri Oct 19 02:25:55 2012
@@ -24,6 +24,7 @@ import org.junit.Before;
 
 public class TestS3_LocalFileContextURI extends FileContextURIBase {
 
+  @Override
   @Before
   public void setUp() throws Exception {
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Fri Oct 19 02:25:55 2012
@@ -26,12 +26,15 @@ import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URI;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 import java.util.HashSet;
 import java.util.Set;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Time;
 
 /**
  * This class tests commands from Trash.
@@ -66,6 +69,7 @@ public class TestTrash extends TestCase 
 
     // filter that matches all the files that start with fileName*
     PathFilter pf = new PathFilter() {
+      @Override
       public boolean accept(Path file) {
         return file.getName().startsWith(prefix);
       }
@@ -97,7 +101,6 @@ public class TestTrash extends TestCase 
   }
 
   /**
-   * 
    * Test trash for the shell's delete command for the default file system
   * specified in the parameter conf
    * @param conf 
@@ -110,10 +113,10 @@ public class TestTrash extends TestCase 
       throws IOException {
     FileSystem fs = FileSystem.get(conf);
 
-    conf.set(FS_TRASH_INTERVAL_KEY, "0"); // disabled
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 0); // disabled
     assertFalse(new Trash(conf).isEnabled());
 
-    conf.set(FS_TRASH_INTERVAL_KEY, "10"); // 10 minute
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 10); // 10 minute
     assertTrue(new Trash(conf).isEnabled());
 
     FsShell shell = new FsShell();
@@ -427,14 +430,46 @@ public class TestTrash extends TestCase 
       String output = byteStream.toString();
       System.setOut(stdout);
       System.setErr(stderr);
-      assertTrue("skipTrash wasn't suggested as remedy to failed rm command",
-        output.indexOf(("Consider using -skipTrash option")) != -1 );
+      assertTrue("skipTrash wasn't suggested as remedy to failed rm command" +
+          " or we deleted / even though we could not get server defaults",
+          output.indexOf("Consider using -skipTrash option") != -1 ||
+          output.indexOf("Failed to determine server trash configuration") != -1);
+    }
+
+    // Verify old checkpoint format is recognized
+    {
+      // emulate two old trash checkpoint directories, one that is old enough
+      // to be deleted on the next expunge and one that isn't.
+      long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY,
+          FS_TRASH_INTERVAL_DEFAULT);
+      long now = Time.now();
+      DateFormat oldCheckpointFormat = new SimpleDateFormat("yyMMddHHmm");
+      Path dirToDelete = new Path(trashRoot.getParent(),
+          oldCheckpointFormat.format(now - (trashInterval * 60 * 1000) - 1));
+      Path dirToKeep = new Path(trashRoot.getParent(),
+          oldCheckpointFormat.format(now));
+      mkdir(trashRootFs, dirToDelete);
+      mkdir(trashRootFs, dirToKeep);
+
+      // Clear out trash
+      int rc = -1;
+      try {
+        rc = shell.run(new String [] { "-expunge" } );
+      } catch (Exception e) {
+        System.err.println("Exception raised from fs expunge " +
+            e.getLocalizedMessage());
+      }
+      assertEquals(0, rc);
+      assertFalse("old checkpoint format not recognized",
+          trashRootFs.exists(dirToDelete));
+      assertTrue("old checkpoint format directory should not be removed",
+          trashRootFs.exists(dirToKeep));
     }
 
   }
 
   public static void trashNonDefaultFS(Configuration conf) throws IOException {
-    conf.set(FS_TRASH_INTERVAL_KEY, "10"); // 10 minute
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 10); // 10 minute
     // attempt non-default FileSystem trash
     {
       final FileSystem lfs = FileSystem.getLocal(conf);
@@ -562,6 +597,7 @@ public class TestTrash extends TestCase 
       super();
       this.home = home;
     }
+    @Override
     public Path getHomeDirectory() {
       return home;
     }
@@ -579,7 +615,7 @@ public class TestTrash extends TestCase 
     FileSystem fs = FileSystem.getLocal(conf);
     
     conf.set("fs.defaultFS", fs.getUri().toString());
-    conf.set(FS_TRASH_INTERVAL_KEY, "10"); //minutes..
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 10); //minutes..
     FsShell shell = new FsShell();
     shell.setConf(conf);
     //Path trashRoot = null;
@@ -600,7 +636,7 @@ public class TestTrash extends TestCase 
       
       writeFile(fs, myFile, 10);
       
-      start = System.currentTimeMillis();
+      start = Time.now();
       
       try {
         retVal = shell.run(args);
@@ -612,7 +648,7 @@ public class TestTrash extends TestCase 
       
       assertTrue(retVal == 0);
       
-      long iterTime = System.currentTimeMillis() - start;
+      long iterTime = Time.now() - start;
       // take median of the first 10 runs
       if(i<10) {
         if(i==0) {
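
The new expunge block above emulates checkpoints named in the old "yyMMddHHmm"
format and expects -expunge to remove only the one older than the trash
interval. A hedged sketch of that age test, assuming expiry is a plain
now-minus-timestamp comparison (the real decision is made inside the trash
policy, not the test):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public final class CheckpointAgeDemo {
      // Sketch only: is an old-format checkpoint directory past the interval?
      static boolean isExpired(String dirName, long intervalMinutes, long nowMillis)
          throws ParseException {
        long stamp = new SimpleDateFormat("yyMMddHHmm").parse(dirName).getTime();
        return nowMillis - stamp > intervalMinutes * 60 * 1000;
      }
    }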

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java Fri Oct 19 02:25:55 2012
@@ -67,6 +67,7 @@ public class DataGenerator extends Confi
    * namespace. Afterwards it reads the file attributes and creates files 
    * in the file. All file content is filled with 'a'.
    */
+  @Override
   public int run(String[] args) throws Exception {
     int exitCode = 0;
     exitCode = init(args);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java Fri Oct 19 02:25:55 2012
@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Options.CreateOpts;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -121,7 +122,7 @@ public class LoadGenerator extends Confi
   private double [] writeProbs = {0.3333};
   private volatile int currentIndex = 0;
   long totalTime = 0;
-  private long startTime = System.currentTimeMillis()+10000;
+  private long startTime = Time.now()+10000;
   final static private int BLOCK_SIZE = 10;
   private ArrayList<String> files = new ArrayList<String>();  // a table of file names
   private ArrayList<String> dirs = new ArrayList<String>(); // a table of directory names
@@ -185,6 +186,7 @@ public class LoadGenerator extends Confi
     /** Main loop
      * Each iteration decides what's the next operation and then pauses.
      */
+    @Override
     public void run() {
       try {
         while (shouldRun) {
@@ -232,9 +234,9 @@ public class LoadGenerator extends Confi
      * the entire file */
     private void read() throws IOException {
       String fileName = files.get(r.nextInt(files.size()));
-      long startTime = System.currentTimeMillis();
+      long startTime = Time.now();
       InputStream in = fc.open(new Path(fileName));
-      executionTime[OPEN] += (System.currentTimeMillis()-startTime);
+      executionTime[OPEN] += (Time.now()-startTime);
       totalNumOfOps[OPEN]++;
       while (in.read(buffer) != -1) {}
       in.close();
@@ -254,9 +256,9 @@ public class LoadGenerator extends Confi
       double fileSize = 0;
       while ((fileSize = r.nextGaussian()+2)<=0) {}
       genFile(file, (long)(fileSize*BLOCK_SIZE));
-      long startTime = System.currentTimeMillis();
+      long startTime = Time.now();
       fc.delete(file, true);
-      executionTime[DELETE] += (System.currentTimeMillis()-startTime);
+      executionTime[DELETE] += (Time.now()-startTime);
       totalNumOfOps[DELETE]++;
     }
     
@@ -265,9 +267,9 @@ public class LoadGenerator extends Confi
      */
     private void list() throws IOException {
       String dirName = dirs.get(r.nextInt(dirs.size()));
-      long startTime = System.currentTimeMillis();
+      long startTime = Time.now();
       fc.listStatus(new Path(dirName));
-      executionTime[LIST] += (System.currentTimeMillis()-startTime);
+      executionTime[LIST] += (Time.now()-startTime);
       totalNumOfOps[LIST]++;
     }
   }
@@ -280,6 +282,7 @@ public class LoadGenerator extends Confi
    * Before exiting, it prints the average execution for 
    * each operation and operation throughput.
    */
+  @Override
   public int run(String[] args) throws Exception {
     int exitCode = init(args);
     if (exitCode != 0) {
@@ -435,7 +438,7 @@ public class LoadGenerator extends Confi
     }
     
     if (r==null) {
-      r = new Random(System.currentTimeMillis()+hostHashCode);
+      r = new Random(Time.now()+hostHashCode);
     }
     
     return initFileDirTables();
@@ -571,7 +574,7 @@ public class LoadGenerator extends Confi
    */
   private void barrier() {
     long sleepTime;
-    while ((sleepTime = startTime - System.currentTimeMillis()) > 0) {
+    while ((sleepTime = startTime - Time.now()) > 0) {
       try {
         Thread.sleep(sleepTime);
       } catch (InterruptedException ex) {
@@ -583,20 +586,20 @@ public class LoadGenerator extends Confi
    * The file is filled with 'a'.
    */
   private void genFile(Path file, long fileSize) throws IOException {
-    long startTime = System.currentTimeMillis();
+    long startTime = Time.now();
     FSDataOutputStream out = fc.create(file,
         EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
         CreateOpts.createParent(), CreateOpts.bufferSize(4096),
         CreateOpts.repFac((short) 3));
-    executionTime[CREATE] += (System.currentTimeMillis()-startTime);
+    executionTime[CREATE] += (Time.now()-startTime);
     totalNumOfOps[CREATE]++;
 
     for (long i=0; i<fileSize; i++) {
       out.writeByte('a');
     }
-    startTime = System.currentTimeMillis();
+    startTime = Time.now();
     out.close();
-    executionTime[WRITE_CLOSE] += (System.currentTimeMillis()-startTime);
+    executionTime[WRITE_CLOSE] += (Time.now()-startTime);
     totalNumOfOps[WRITE_CLOSE]++;
   }
   

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java Fri Oct 19 02:25:55 2012
@@ -214,6 +214,7 @@ public class StructureGenerator {
     }
     
     /** Output a file attribute */
+    @Override
     protected void outputFiles(PrintStream out, String prefix) {
       prefix = (prefix == null)?super.name: prefix + "/"+super.name;
       out.println(prefix + " " + numOfBlocks);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java Fri Oct 19 02:25:55 2012
@@ -47,34 +47,42 @@ class InMemoryFileSystemStore implements
   private SortedMap<Path, INode> inodes = new TreeMap<Path, INode>();
   private Map<Long, byte[]> blocks = new HashMap<Long, byte[]>();
   
+  @Override
   public void initialize(URI uri, Configuration conf) {
     this.conf = conf;
   }
   
+  @Override
   public String getVersion() throws IOException {
     return "0";
   }
 
+  @Override
   public void deleteINode(Path path) throws IOException {
     inodes.remove(normalize(path));
   }
 
+  @Override
   public void deleteBlock(Block block) throws IOException {
     blocks.remove(block.getId());
   }
 
+  @Override
   public boolean inodeExists(Path path) throws IOException {
     return inodes.containsKey(normalize(path));
   }
 
+  @Override
   public boolean blockExists(long blockId) throws IOException {
     return blocks.containsKey(blockId);
   }
 
+  @Override
   public INode retrieveINode(Path path) throws IOException {
     return inodes.get(normalize(path));
   }
 
+  @Override
   public File retrieveBlock(Block block, long byteRangeStart) throws IOException {
     byte[] data = blocks.get(block.getId());
     File file = createTempFile();
@@ -100,6 +108,7 @@ class InMemoryFileSystemStore implements
     return result;
   }
 
+  @Override
   public Set<Path> listSubPaths(Path path) throws IOException {
     Path normalizedPath = normalize(path);
     // This is inefficient but more than adequate for testing purposes.
@@ -112,6 +121,7 @@ class InMemoryFileSystemStore implements
     return subPaths;
   }
 
+  @Override
   public Set<Path> listDeepSubPaths(Path path) throws IOException {
     Path normalizedPath = normalize(path);    
     String pathString = normalizedPath.toUri().getPath();
@@ -128,10 +138,12 @@ class InMemoryFileSystemStore implements
     return subPaths;
   }
 
+  @Override
   public void storeINode(Path path, INode inode) throws IOException {
     inodes.put(normalize(path), inode);
   }
 
+  @Override
   public void storeBlock(Block block, File file) throws IOException {
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     byte[] buf = new byte[8192];
@@ -157,11 +169,13 @@ class InMemoryFileSystemStore implements
     return new Path(path.toUri().getPath());
   }
 
+  @Override
   public void purge() throws IOException {
     inodes.clear();
     blocks.clear();
   }
 
+  @Override
   public void dump() throws IOException {
     StringBuilder sb = new StringBuilder(getClass().getSimpleName());
     sb.append(", \n");
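
The @Override annotations added throughout this class are accepted on
interface implementations only since Java 6; the payoff is that a signature
drift in the store interface becomes a compile error instead of a silently
orphaned method. In miniature:

    interface VersionedStore {
      String getVersion();
    }

    class DemoStore implements VersionedStore {
      @Override  // legal on interface implementations since Java 6
      public String getVersion() {
        return "0";
      }
    }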

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java Fri Oct 19 02:25:55 2012
@@ -39,6 +39,7 @@ import java.util.TreeSet;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Time;
 
 /**
  * <p>
@@ -54,15 +55,18 @@ class InMemoryNativeFileSystemStore impl
     new TreeMap<String, FileMetadata>();
   private SortedMap<String, byte[]> dataMap = new TreeMap<String, byte[]>();
 
+  @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
     this.conf = conf;
   }
 
+  @Override
   public void storeEmptyFile(String key) throws IOException {
-    metadataMap.put(key, new FileMetadata(key, 0, System.currentTimeMillis()));
+    metadataMap.put(key, new FileMetadata(key, 0, Time.now()));
     dataMap.put(key, new byte[0]);
   }
 
+  @Override
   public void storeFile(String key, File file, byte[] md5Hash)
     throws IOException {
     
@@ -81,14 +85,16 @@ class InMemoryNativeFileSystemStore impl
       }
     }
     metadataMap.put(key,
-        new FileMetadata(key, file.length(), System.currentTimeMillis()));
+        new FileMetadata(key, file.length(), Time.now()));
     dataMap.put(key, out.toByteArray());
   }
 
+  @Override
   public InputStream retrieve(String key) throws IOException {
     return retrieve(key, 0);
   }
   
+  @Override
   public InputStream retrieve(String key, long byteRangeStart)
     throws IOException {
     
@@ -117,15 +123,18 @@ class InMemoryNativeFileSystemStore impl
     return result;
   }
 
+  @Override
   public FileMetadata retrieveMetadata(String key) throws IOException {
     return metadataMap.get(key);
   }
 
+  @Override
   public PartialListing list(String prefix, int maxListingLength)
       throws IOException {
     return list(prefix, maxListingLength, null, false);
   }
 
+  @Override
   public PartialListing list(String prefix, int maxListingLength,
       String priorLastKey, boolean recursive) throws IOException {
 
@@ -164,16 +173,19 @@ class InMemoryNativeFileSystemStore impl
         commonPrefixes.toArray(new String[0]));
   }
 
+  @Override
   public void delete(String key) throws IOException {
     metadataMap.remove(key);
     dataMap.remove(key);
   }
 
+  @Override
   public void copy(String srcKey, String dstKey) throws IOException {
     metadataMap.put(dstKey, metadataMap.get(srcKey));
     dataMap.put(dstKey, dataMap.get(srcKey));
   }
   
+  @Override
   public void purge(String prefix) throws IOException {
     Iterator<Entry<String, FileMetadata>> i =
       metadataMap.entrySet().iterator();
@@ -186,6 +198,7 @@ class InMemoryNativeFileSystemStore impl
     }
   }
 
+  @Override
   public void dump() throws IOException {
     System.out.println(metadataMap.values());
     System.out.println(dataMap.keySet());

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java Fri Oct 19 02:25:55 2012
@@ -48,6 +48,10 @@ public class TestCommandFactory {
     factory.addClass(TestCommand3.class, "tc3");
     names = factory.getNames();
     assertArrayEquals(new String []{"tc1", "tc2", "tc2.1", "tc3"}, names);
+    
+    factory.addClass(TestCommand4.class, (new TestCommand4()).getName());
+    names = factory.getNames();
+    assertArrayEquals(new String[]{"tc1", "tc2", "tc2.1", "tc3", "tc4"}, names);
   }
   
   @Test
@@ -72,8 +76,17 @@ public class TestCommandFactory {
     assertNotNull(instance);
     assertEquals(TestCommand2.class, instance.getClass());    
     assertEquals("tc2.1", instance.getCommandName());
+    
+    factory.addClass(TestCommand4.class, "tc4");
+    instance = factory.getInstance("tc4");
+    assertNotNull(instance);
+    assertEquals(TestCommand4.class, instance.getClass());    
+    assertEquals("tc4", instance.getCommandName());
+    String usage = instance.getUsage();
+    assertEquals("-tc4 tc4_usage", usage);
+    assertEquals("tc4_description", instance.getDescription());
   }
-  
+
   static class TestRegistrar {
     public static void registerCommands(CommandFactory factory) {
       factory.addClass(TestCommand1.class, "tc1");
@@ -84,4 +97,10 @@ public class TestCommandFactory {
   static class TestCommand1 extends FsCommand {}
   static class TestCommand2 extends FsCommand {}
   static class TestCommand3 extends FsCommand {}
+  
+  static class TestCommand4 extends FsCommand {
+    static final String NAME = "tc4";
+    static final String USAGE = "tc4_usage";
+    static final String DESCRIPTION = "tc4_description";
+  }
 }
\ No newline at end of file
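
TestCommand4 carries no behavior of its own; its static NAME, USAGE, and
DESCRIPTION fields suffice because the shell's Command base class appears to
read such metadata reflectively, and the factory prefixes the name when
rendering usage ("-tc4 tc4_usage"). A hedged sketch of that lookup
(commandField is an illustrative helper, not Hadoop's API):

    import java.lang.reflect.Field;

    public final class CommandMetadataDemo {
      // Fetch a command class's static metadata field by name.
      static String commandField(Class<?> commandClass, String fieldName)
          throws Exception {
        Field f = commandClass.getDeclaredField(fieldName);
        f.setAccessible(true);       // the test's fields are package-private
        return (String) f.get(null); // static field, so no instance needed
      }
      // commandField(TestCommand4.class, "USAGE") -> "tc4_usage",
      // which the shell renders as "-tc4 tc4_usage".
    }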

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java Fri Oct 19 02:25:55 2012
@@ -26,23 +26,17 @@ import java.util.Arrays;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.junit.BeforeClass;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
 public class TestPathData {
-  protected static Configuration conf;
-  protected static FileSystem fs;
-  protected static String dirString;
-  protected static Path testDir;
-  protected static PathData item;
-  
-  protected static String[] d1Paths =
-    new String[] { "d1/f1", "d1/f1.1", "d1/f2" };
-  protected static String[] d2Paths =
-    new String[] { "d2/f3" };
-        
-  @BeforeClass
-  public static void initialize() throws Exception {
+  protected Configuration conf;
+  protected FileSystem fs;
+  protected Path testDir;
+
+  @Before
+  public void initialize() throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testDir = new Path(
@@ -60,23 +54,28 @@ public class TestPathData {
     fs.create(new Path("d2","f3"));
   }
 
+  @After
+  public void cleanup() throws Exception {
+    fs.close();
+  }
+
   @Test
   public void testWithDirStringAndConf() throws Exception {
-    dirString = "d1";
-    item = new PathData(dirString, conf);
-    checkPathData();
+    String dirString = "d1";
+    PathData item = new PathData(dirString, conf);
+    checkPathData(dirString, item);
 
     // properly implementing symlink support in various commands will require
     // trailing slashes to be retained
     dirString = "d1/";
     item = new PathData(dirString, conf);
-    checkPathData();
+    checkPathData(dirString, item);
   }
 
   @Test
   public void testUnqualifiedUriContents() throws Exception {
-    dirString = "d1";
-    item = new PathData(dirString, conf);
+    String dirString = "d1";
+    PathData item = new PathData(dirString, conf);
     PathData[] items = item.getDirectoryContents();
     assertEquals(
         sortedString("d1/f1", "d1/f1.1", "d1/f2"),
@@ -86,8 +85,8 @@ public class TestPathData {
 
   @Test
   public void testQualifiedUriContents() throws Exception {
-    dirString = fs.makeQualified(new Path("d1")).toString();
-    item = new PathData(dirString, conf);
+    String dirString = fs.makeQualified(new Path("d1")).toString();
+    PathData item = new PathData(dirString, conf);
     PathData[] items = item.getDirectoryContents();
     assertEquals(
         sortedString(dirString+"/f1", dirString+"/f1.1", dirString+"/f2"),
@@ -97,8 +96,8 @@ public class TestPathData {
 
   @Test
   public void testCwdContents() throws Exception {
-    dirString = Path.CUR_DIR;
-    item = new PathData(dirString, conf);
+    String dirString = Path.CUR_DIR;
+    PathData item = new PathData(dirString, conf);
     PathData[] items = item.getDirectoryContents();
     assertEquals(
         sortedString("d1", "d2"),
@@ -106,17 +105,16 @@ public class TestPathData {
     );
   }
 
-
-	@Test
-	public void testToFile() throws Exception {
-    item = new PathData(".", conf);
+  @Test
+  public void testToFile() throws Exception {
+    PathData item = new PathData(".", conf);
     assertEquals(new File(testDir.toString()), item.toFile());
-	  item = new PathData("d1/f1", conf);
-	  assertEquals(new File(testDir+"/d1/f1"), item.toFile());
-    item = new PathData(testDir+"/d1/f1", conf);
-    assertEquals(new File(testDir+"/d1/f1"), item.toFile());
-	}
-	
+    item = new PathData("d1/f1", conf);
+    assertEquals(new File(testDir + "/d1/f1"), item.toFile());
+    item = new PathData(testDir + "/d1/f1", conf);
+    assertEquals(new File(testDir + "/d1/f1"), item.toFile());
+  }
+
   @Test
   public void testAbsoluteGlob() throws Exception {
     PathData[] items = PathData.expandAsGlob(testDir+"/d1/f1*", conf);
@@ -147,18 +145,18 @@ public class TestPathData {
 
   @Test
   public void testWithStringAndConfForBuggyPath() throws Exception {
-    dirString = "file:///tmp";
-    testDir = new Path(dirString);
-    item = new PathData(dirString, conf);
+    String dirString = "file:///tmp";
+    Path tmpDir = new Path(dirString);
+    PathData item = new PathData(dirString, conf);
     // this may fail some day if Path is fixed to not crunch the uri
     // if the authority is null, however we need to test that the PathData
     // toString() returns the given string, while Path toString() does
     // the crunching
-    assertEquals("file:/tmp", testDir.toString());
-    checkPathData();
+    assertEquals("file:/tmp", tmpDir.toString());
+    checkPathData(dirString, item);
   }
 
-  public void checkPathData() throws Exception {
+  public void checkPathData(String dirString, PathData item) throws Exception {
     assertEquals("checking fs", fs, item.fs);
     assertEquals("checking string", dirString, item.toString());
     assertEquals("checking path",

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java Fri Oct 19 02:25:55 2012
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
+import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.viewfs.ChRootedFileSystem;
@@ -33,6 +34,7 @@ import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import static org.mockito.Mockito.*;
 
 public class TestChRootedFileSystem {
   FileSystem fSys; // The ChRoootedFs
@@ -314,4 +316,47 @@ public class TestChRootedFileSystem {
   public void testResolvePathNonExisting() throws IOException {
       fSys.resolvePath(new Path("/nonExisting"));
   }
-}
+  
+  @Test
+  public void testDeleteOnExitPathHandling() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+        
+    URI chrootUri = URI.create("mockfs://foo/a/b");
+    ChRootedFileSystem chrootFs = new ChRootedFileSystem(chrootUri, conf);
+    FileSystem mockFs = ((FilterFileSystem)chrootFs.getRawFileSystem())
+        .getRawFileSystem();
+    
+    // ensure delete propagates the correct path
+    Path chrootPath = new Path("/c");
+    Path rawPath = new Path("/a/b/c");
+    chrootFs.delete(chrootPath, false);
+    verify(mockFs).delete(eq(rawPath), eq(false));
+    reset(mockFs);
+ 
+    // fake that the path exists for deleteOnExit
+    FileStatus stat = mock(FileStatus.class);
+    when(mockFs.getFileStatus(eq(rawPath))).thenReturn(stat);
+    // ensure deleteOnExit propagates the correct path
+    chrootFs.deleteOnExit(chrootPath);
+    chrootFs.close();
+    verify(mockFs).delete(eq(rawPath), eq(true));
+  }
+  
+  @Test
+  public void testURIEmptyPath() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+
+    URI chrootUri = URI.create("mockfs://foo");
+    new ChRootedFileSystem(chrootUri, conf);
+  }
+
+  static class MockFileSystem extends FilterFileSystem {
+    MockFileSystem() {
+      super(mock(FileSystem.class));
+    }
+    @Override
+    public void initialize(URI name, Configuration conf) throws IOException {}
+  }
+}
\ No newline at end of file
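
The Mockito verifications above pin down the chroot translation: a client
path /c under the mount root /a/b must reach the underlying file system as
/a/b/c, for delete and deleteOnExit alike. Schematically (a sketch, not
ChRootedFileSystem's actual code, and assuming an absolute client path):

    import org.apache.hadoop.fs.Path;

    public final class ChrootMappingDemo {
      // e.g. toRawPath(new Path("/a/b"), new Path("/c")) -> /a/b/c
      static Path toRawPath(Path chrootRoot, Path clientPath) {
        // Strip the leading '/' so the client path resolves under the root.
        return new Path(chrootRoot, clientPath.toString().substring(1));
      }
    }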

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java Fri Oct 19 02:25:55 2012
@@ -33,6 +33,7 @@ import org.junit.Test;
 
 public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTest {
    static FileSystem fcTarget;
+  @Override
   @Before
   public void setUp() throws Exception {
     Configuration conf = new Configuration();
@@ -42,6 +43,7 @@ public class TestFSMainOperationsLocalFi
     super.setUp();
   }
   
+  @Override
   @After
   public void tearDown() throws Exception {
     super.tearDown();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java Fri Oct 19 02:25:55 2012
@@ -28,12 +28,14 @@ public class TestFcCreateMkdirLocalFs  e
   FileContextCreateMkdirBaseTest {
 
 
+  @Override
   @Before
   public void setUp() throws Exception {
     fc = ViewFsTestSetup.setupForViewFsLocalFs();
     super.setUp();
   }
   
+  @Override
   @After
   public void tearDown() throws Exception {
     super.tearDown();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java Fri Oct 19 02:25:55 2012
@@ -36,6 +36,7 @@ public class TestFcMainOperationsLocalFs
   FileContext fclocal;
   Path targetOfTests;
 
+  @Override
   @Before
   public void setUp() throws Exception {
     /**
@@ -79,6 +80,7 @@ public class TestFcMainOperationsLocalFs
     super.setUp();
   }
   
+  @Override
   @After
   public void tearDown() throws Exception {
     super.tearDown();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java Fri Oct 19 02:25:55 2012
@@ -27,12 +27,14 @@ import org.junit.Before;
 public class TestFcPermissionsLocalFs  extends FileContextPermissionBase {
 
 
+  @Override
   @Before
   public void setUp() throws Exception {
     fc = ViewFsTestSetup.setupForViewFsLocalFs();
     super.setUp();
   }
   
+  @Override
   @After
   public void tearDown() throws Exception {
     super.tearDown();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java Fri Oct 19 02:25:55 2012
@@ -22,10 +22,18 @@ import static org.junit.Assert.*;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
-
+import java.util.Arrays;
+import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsConstants;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 /**
@@ -38,6 +46,29 @@ import org.junit.Test;
 public class TestViewFileSystemDelegationTokenSupport {
   
   private static final String MOUNT_TABLE_NAME = "vfs-cluster";
+  static Configuration conf;
+  static FileSystem viewFs;
+  static FakeFileSystem fs1;
+  static FakeFileSystem fs2;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    conf = ViewFileSystemTestSetup.createConfig();
+    fs1 = setupFileSystem(new URI("fs1:///"), FakeFileSystem.class);
+    fs2 = setupFileSystem(new URI("fs2:///"), FakeFileSystem.class);
+    viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
+  }
+
+  static FakeFileSystem setupFileSystem(URI uri, Class<? extends FileSystem> clazz)
+      throws Exception {
+    String scheme = uri.getScheme();
+    conf.set("fs."+scheme+".impl", clazz.getName());
+    FakeFileSystem fs = (FakeFileSystem)FileSystem.get(uri, conf);
+    // mount each fs twice, will later ensure 1 token/fs
+    ConfigUtil.addLink(conf, "/mounts/"+scheme+"-one", fs.getUri());
+    ConfigUtil.addLink(conf, "/mounts/"+scheme+"-two", fs.getUri());
+    return fs;
+  }
 
   /**
    * Regression test for HADOOP-8408.
@@ -69,4 +100,94 @@ public class TestViewFileSystemDelegatio
     assertNull(serviceName);
   }
 
+  @Test
+  public void testGetChildFileSystems() throws Exception {
+    assertNull(fs1.getChildFileSystems());
+    assertNull(fs2.getChildFileSystems());    
+    List<FileSystem> children = Arrays.asList(viewFs.getChildFileSystems());
+    assertEquals(2, children.size());
+    assertTrue(children.contains(fs1));
+    assertTrue(children.contains(fs2));
+  }
+  
+  @Test
+  public void testAddDelegationTokens() throws Exception {
+    Credentials creds = new Credentials();
+    Token<?> fs1Tokens[] = addTokensWithCreds(fs1, creds);
+    assertEquals(1, fs1Tokens.length);
+    assertEquals(1, creds.numberOfTokens());
+    Token<?> fs2Tokens[] = addTokensWithCreds(fs2, creds);
+    assertEquals(1, fs2Tokens.length);
+    assertEquals(2, creds.numberOfTokens());
+    
+    Credentials savedCreds = creds;
+    creds = new Credentials();
+    
+    // should get the same set of tokens as explicitly fetched above
+    Token<?> viewFsTokens[] = viewFs.addDelegationTokens("me", creds);
+    assertEquals(2, viewFsTokens.length);
+    assertTrue(creds.getAllTokens().containsAll(savedCreds.getAllTokens()));
+    assertEquals(savedCreds.numberOfTokens(), creds.numberOfTokens()); 
+    // should get none, already have all tokens
+    viewFsTokens = viewFs.addDelegationTokens("me", creds);
+    assertEquals(0, viewFsTokens.length);
+    assertTrue(creds.getAllTokens().containsAll(savedCreds.getAllTokens()));
+    assertEquals(savedCreds.numberOfTokens(), creds.numberOfTokens());
+  }
+
+  Token<?>[] addTokensWithCreds(FileSystem fs, Credentials creds) throws Exception {
+    Credentials savedCreds;
+    
+    savedCreds = new Credentials(creds);
+    Token<?> tokens[] = fs.addDelegationTokens("me", creds);
+    // test that we got the token we wanted, and that creds were modified
+    assertEquals(1, tokens.length);
+    assertEquals(fs.getCanonicalServiceName(), tokens[0].getService().toString());
+    assertTrue(creds.getAllTokens().contains(tokens[0]));
+    assertTrue(creds.getAllTokens().containsAll(savedCreds.getAllTokens()));
+    assertEquals(savedCreds.numberOfTokens()+1, creds.numberOfTokens());
+    
+    // shouldn't get any new tokens since already in creds
+    savedCreds = new Credentials(creds);
+    Token<?> tokenRefetch[] = fs.addDelegationTokens("me", creds);
+    assertEquals(0, tokenRefetch.length);
+    assertTrue(creds.getAllTokens().containsAll(savedCreds.getAllTokens()));
+    assertEquals(savedCreds.numberOfTokens(), creds.numberOfTokens()); 
+
+    return tokens;
+  }
+
+  static class FakeFileSystem extends RawLocalFileSystem {
+    URI uri;
+
+    @Override
+    public void initialize(URI name, Configuration conf) throws IOException {
+      this.uri = name;
+    }
+
+    @Override
+    public Path getInitialWorkingDirectory() {
+      return new Path("/"); // ctor calls getUri before the uri is inited...
+    }
+    
+    @Override
+    public URI getUri() {
+      return uri;
+    }
+
+    @Override
+    public String getCanonicalServiceName() {
+      return String.valueOf(this.getUri()+"/"+this.hashCode());
+    }
+
+    @Override
+    public Token<?> getDelegationToken(String renewer) throws IOException {
+      Token<?> token = new Token<TokenIdentifier>();
+      token.setService(new Text(getCanonicalServiceName()));
+      return token;
+    }
+
+    @Override
+    public void close() {}
+  }
 }
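
testAddDelegationTokens documents the contract that replaced
getDelegationTokens(): addDelegationTokens(renewer, creds) fetches a token
only for services not already present in the passed Credentials and returns
just the newly fetched ones, so a second call yields an empty array. A sketch
of that contract for a single file system (illustrative, not FileSystem's
implementation):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    public final class TokenContractDemo {
      static Token<?>[] addTokenIfAbsent(FileSystem fs, String renewer,
          Credentials creds) throws IOException {
        List<Token<?>> fetched = new ArrayList<Token<?>>();
        Text service = new Text(fs.getCanonicalServiceName());
        if (creds.getToken(service) == null) { // skip services already held
          Token<?> token = fs.getDelegationToken(renewer);
          if (token != null) {
            creds.addToken(service, token);
            fetched.add(token);
          }
        }
        return fetched.toArray(new Token<?>[0]);
      }
    }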

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java Fri Oct 19 02:25:55 2012
@@ -39,6 +39,7 @@ import org.junit.Before;
 public class TestViewFileSystemLocalFileSystem extends ViewFileSystemBaseTest {
 
 
+  @Override
   @Before
   public void setUp() throws Exception {
     // create the test root on local_fs
@@ -47,6 +48,7 @@ public class TestViewFileSystemLocalFile
     
   }
 
+  @Override
   @After
   public void tearDown() throws Exception {
     fsTarget.delete(FileSystemTestHelper.getTestRootPath(fsTarget), true);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java Fri Oct 19 02:25:55 2012
@@ -42,6 +42,7 @@ import org.junit.Test;
 public class TestViewFileSystemWithAuthorityLocalFileSystem extends ViewFileSystemBaseTest {
   URI schemeWithAuthority;
 
+  @Override
   @Before
   public void setUp() throws Exception {
     // create the test root on local_fs
@@ -55,12 +56,14 @@ public class TestViewFileSystemWithAutho
     fsView = FileSystem.get(schemeWithAuthority, conf);
   }
 
+  @Override
   @After
   public void tearDown() throws Exception {
     fsTarget.delete(FileSystemTestHelper.getTestRootPath(fsTarget), true);
     super.tearDown();
   }
  
+  @Override
   @Test
   public void testBasicPaths() {
     Assert.assertEquals(schemeWithAuthority,

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java Fri Oct 19 02:25:55 2012
@@ -26,6 +26,7 @@ import org.junit.Before;
 
 public class TestViewFsLocalFs extends ViewFsBaseTest {
 
+  @Override
   @Before
   public void setUp() throws Exception {
     // create the test root on local_fs
@@ -34,6 +35,7 @@ public class TestViewFsLocalFs extends V
     
   }
 
+  @Override
   @After
   public void tearDown() throws Exception {
     super.tearDown();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java Fri Oct 19 02:25:55 2012
@@ -30,7 +30,6 @@ import org.apache.hadoop.fs.TestTrash;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.mortbay.log.Log;
 
 public class TestViewFsTrash {
   FileSystem fsTarget;  // the target file system - the mount will point here
@@ -46,6 +45,7 @@ public class TestViewFsTrash {
       super();
       this.home = home;
     }
+    @Override
     public Path getHomeDirectory() {
       return home;
     }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java Fri Oct 19 02:25:55 2012
@@ -41,6 +41,7 @@ import org.junit.Test;
 public class TestViewFsWithAuthorityLocalFs extends ViewFsBaseTest {
   URI schemeWithAuthority;
 
+  @Override
   @Before
   public void setUp() throws Exception {
     // create the test root on local_fs
@@ -54,11 +55,13 @@ public class TestViewFsWithAuthorityLoca
     fcView = FileContext.getFileContext(schemeWithAuthority, conf);  
   }
 
+  @Override
   @After
   public void tearDown() throws Exception {
     super.tearDown();
   }
   
+  @Override
   @Test
   public void testBasicPaths() {
       Assert.assertEquals(schemeWithAuthority,

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java Fri Oct 19 02:25:55 2012
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.net.URISyntaxException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java Fri Oct 19 02:25:55 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.fs.viewfs;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.List;
 
 
@@ -137,9 +138,9 @@ public class ViewFileSystemBaseTest {
    */
   @Test
   public void testGetDelegationTokens() throws IOException {
-    List<Token<?>> delTokens = 
-        fsView.getDelegationTokens("sanjay");
-    Assert.assertEquals(getExpectedDelegationTokenCount(), delTokens.size()); 
+    Token<?>[] delTokens = 
+        fsView.addDelegationTokens("sanjay", new Credentials());
+    Assert.assertEquals(getExpectedDelegationTokenCount(), delTokens.length); 
   }
   
   int getExpectedDelegationTokenCount() {
@@ -150,29 +151,20 @@ public class ViewFileSystemBaseTest {
   public void testGetDelegationTokensWithCredentials() throws IOException {
     Credentials credentials = new Credentials();
     List<Token<?>> delTokens =
-        fsView.getDelegationTokens("sanjay", credentials);
+        Arrays.asList(fsView.addDelegationTokens("sanjay", credentials));
 
     int expectedTokenCount = getExpectedDelegationTokenCountWithCredentials();
 
     Assert.assertEquals(expectedTokenCount, delTokens.size());
+    Credentials newCredentials = new Credentials();
     for (int i = 0; i < expectedTokenCount / 2; i++) {
       Token<?> token = delTokens.get(i);
-      credentials.addToken(token.getService(), token);
+      newCredentials.addToken(token.getService(), token);
     }
 
     List<Token<?>> delTokens2 =
-        fsView.getDelegationTokens("sanjay", credentials);
-    Assert.assertEquals(expectedTokenCount, delTokens2.size());
-
-    for (int i = 0; i < delTokens2.size(); i++) {
-      for (int j = 0; j < delTokens.size(); j++) {
-        if (delTokens.get(j) == delTokens2.get(i)) {
-          delTokens.remove(j);
-          break;
-        }
-      }
-    }
-    Assert.assertEquals((expectedTokenCount + 1) / 2, delTokens.size());
+        Arrays.asList(fsView.addDelegationTokens("sanjay", newCredentials));
+    Assert.assertEquals((expectedTokenCount + 1) / 2, delTokens2.size());
   }
 
   int getExpectedDelegationTokenCountWithCredentials() {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java Fri Oct 19 02:25:55 2012
@@ -23,6 +23,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.apache.zookeeper.data.Stat;
 import org.apache.zookeeper.server.ZooKeeperServer;
@@ -36,7 +37,7 @@ public abstract class ActiveStandbyElect
   public static void waitForActiveLockData(TestContext ctx,
       ZooKeeperServer zks, String parentDir, byte[] activeData)
       throws Exception {
-    long st = System.currentTimeMillis();
+    long st = Time.now();
     long lastPrint = st;
     while (true) {
       if (ctx != null) {
@@ -51,17 +52,17 @@ public abstract class ActiveStandbyElect
             Arrays.equals(activeData, data)) {
           return;
         }
-        if (System.currentTimeMillis() > lastPrint + LOG_INTERVAL_MS) {
+        if (Time.now() > lastPrint + LOG_INTERVAL_MS) {
           LOG.info("Cur data: " + StringUtils.byteToHexString(data));
-          lastPrint = System.currentTimeMillis();
+          lastPrint = Time.now();
         }
       } catch (NoNodeException nne) {
         if (activeData == null) {
           return;
         }
-        if (System.currentTimeMillis() > lastPrint + LOG_INTERVAL_MS) {
+        if (Time.now() > lastPrint + LOG_INTERVAL_MS) {
           LOG.info("Cur data: no node");
-          lastPrint = System.currentTimeMillis();
+          lastPrint = Time.now();
         }
       }
       Thread.sleep(50);
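
The System.currentTimeMillis() -> Time.now() substitution here (and in
several files below) routes all clock reads through
org.apache.hadoop.util.Time, keeping call sites uniform and easy to
grep. The bounded-poll shape these utilities use, sketched with
illustrative names and constants (the utility above actually loops
without a deadline; the deadline here is an addition for the sketch):

    import org.apache.hadoop.util.Time;

    public class PollSketch {
      // Poll a condition until a deadline, logging progress at most once
      // per second. isDone() and the constants are stand-ins.
      static void waitFor(long timeoutMs) throws InterruptedException {
        long st = Time.now();
        long lastPrint = st;
        while (Time.now() - st < timeoutMs) {
          if (isDone()) {
            return;
          }
          if (Time.now() > lastPrint + 1000) {
            System.out.println("still waiting...");
            lastPrint = Time.now();
          }
          Thread.sleep(50);
        }
        throw new IllegalStateException("timed out waiting for condition");
      }
      static boolean isDone() { return false; }
    }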

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java Fri Oct 19 02:25:55 2012
@@ -25,13 +25,13 @@ import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.net.Socket;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
+import org.apache.hadoop.util.Time;
 import org.apache.zookeeper.PortAssignment;
 import org.apache.zookeeper.TestableZooKeeper;
 import org.apache.zookeeper.WatchedEvent;
@@ -81,6 +81,7 @@ public abstract class ClientBaseWithFixe
      *
      */
     protected class NullWatcher implements Watcher {
+        @Override
         public void process(WatchedEvent event) { /* nada */ }
     }
 
@@ -96,6 +97,7 @@ public abstract class ClientBaseWithFixe
             clientConnected = new CountDownLatch(1);
             connected = false;
         }
+        @Override
         synchronized public void process(WatchedEvent event) {
             if (event.getState() == KeeperState.SyncConnected ||
                 event.getState() == KeeperState.ConnectedReadOnly) {
@@ -111,11 +113,11 @@ public abstract class ClientBaseWithFixe
             return connected;
         }
         synchronized void waitForConnected(long timeout) throws InterruptedException, TimeoutException {
-            long expire = System.currentTimeMillis() + timeout;
+            long expire = Time.now() + timeout;
             long left = timeout;
             while(!connected && left > 0) {
                 wait(left);
-                left = expire - System.currentTimeMillis();
+                left = expire - Time.now();
             }
             if (!connected) {
                 throw new TimeoutException("Did not connect");
@@ -123,11 +125,11 @@ public abstract class ClientBaseWithFixe
             }
         }
         synchronized void waitForDisconnected(long timeout) throws InterruptedException, TimeoutException {
-            long expire = System.currentTimeMillis() + timeout;
+            long expire = Time.now() + timeout;
             long left = timeout;
             while(connected && left > 0) {
                 wait(left);
-                left = expire - System.currentTimeMillis();
+                left = expire - Time.now();
             }
             if (connected) {
                 throw new TimeoutException("Did not disconnect");
@@ -248,7 +250,7 @@ public abstract class ClientBaseWithFixe
     }
 
     public static boolean waitForServerUp(String hp, long timeout) {
-        long start = System.currentTimeMillis();
+        long start = Time.now();
         while (true) {
             try {
                 // if there are multiple hostports, just take the first one
@@ -263,7 +265,7 @@ public abstract class ClientBaseWithFixe
                 LOG.info("server " + hp + " not up " + e);
             }
 
-            if (System.currentTimeMillis() > start + timeout) {
+            if (Time.now() > start + timeout) {
                 break;
             }
             try {
@@ -275,7 +277,7 @@ public abstract class ClientBaseWithFixe
         return false;
     }
     public static boolean waitForServerDown(String hp, long timeout) {
-        long start = System.currentTimeMillis();
+        long start = Time.now();
         while (true) {
             try {
                 HostPort hpobj = parseHostPortList(hp).get(0);
@@ -284,7 +286,7 @@ public abstract class ClientBaseWithFixe
                 return true;
             }
 
-            if (System.currentTimeMillis() > start + timeout) {
+            if (Time.now() > start + timeout) {
                 break;
             }
             try {
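
Beyond the same Time.now() substitution, waitForConnected and
waitForDisconnected above keep the classic guarded-wait shape:
recomputing the remaining time after every wakeup means a spurious or
early wakeup simply re-enters the loop with a smaller budget. Reduced
to its essentials (the 'connected' flag is the only state):

    import java.util.concurrent.TimeoutException;
    import org.apache.hadoop.util.Time;

    class GuardedWaitSketch {
      private boolean connected;

      // Block until 'connected' flips true or the deadline passes.
      synchronized void waitForConnected(long timeout)
          throws InterruptedException, TimeoutException {
        long expire = Time.now() + timeout;
        long left = timeout;
        while (!connected && left > 0) {
          wait(left);                  // may wake early or spuriously
          left = expire - Time.now();  // recompute the remaining budget
        }
        if (!connected) {
          throw new TimeoutException("Did not connect");
        }
      }

      synchronized void markConnected() {
        connected = true;
        notifyAll();
      }
    }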

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java Fri Oct 19 02:25:55 2012
@@ -184,6 +184,7 @@ class DummyHAService extends HAServiceTa
   }
   
   public static class DummyFencer implements FenceMethod {
+    @Override
     public void checkArgs(String args) throws BadFencingConfigurationException {
     }
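
Many of the remaining hunks in this commit just add @Override to
interface implementations, which has been legal since Java 6. The
annotation is cheap insurance: a mistyped signature becomes a compile
error instead of a silently unused method. An invented illustration:

    interface Greeter {
      void greet(String name);
    }

    class ConsoleGreeter implements Greeter {
      @Override
      public void greet(String name) {
        System.out.println("hi " + name);
      }
      // With @Override, a typo such as "public void gret(String name)"
      // would fail to compile instead of quietly adding an unrelated
      // method that the interface never calls.
    }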
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java Fri Oct 19 02:25:55 2012
@@ -40,7 +40,9 @@ public class TestHAAdmin {
   
   private HAAdmin tool;
   private ByteArrayOutputStream errOutBytes = new ByteArrayOutputStream();
+  private ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
   private String errOutput;
+  private String output;
 
   @Before
   public void setup() throws IOException {
@@ -53,12 +55,14 @@ public class TestHAAdmin {
     };
     tool.setConf(new Configuration());
     tool.errOut = new PrintStream(errOutBytes);
+    tool.out = new PrintStream(outBytes);
   }
   
   private void assertOutputContains(String string) {
-    if (!errOutput.contains(string)) {
-      fail("Expected output to contain '" + string + "' but was:\n" +
-          errOutput);
+    if (!errOutput.contains(string) && !output.contains(string)) {
+      fail("Expected output to contain '" + string + 
+          "' but err_output was:\n" + errOutput + 
+          "\n and output was: \n" + output);
     }
   }
   
@@ -88,17 +92,19 @@ public class TestHAAdmin {
 
   @Test
   public void testHelp() throws Exception {
-    assertEquals(-1, runTool("-help"));
+    assertEquals(0, runTool("-help"));
     assertEquals(0, runTool("-help", "transitionToActive"));
     assertOutputContains("Transitions the service into Active");
   }
 
   private Object runTool(String ... args) throws Exception {
     errOutBytes.reset();
+    outBytes.reset();
     LOG.info("Running: HAAdmin " + Joiner.on(" ").join(args));
     int ret = tool.run(args);
     errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
-    LOG.info("Output:\n" + errOutput);
+    output = new String(outBytes.toByteArray(), Charsets.UTF_8);
+    LOG.info("Err_output:\n" + errOutput + "\nOutput:\n" + output);
     return ret;
   }
 }
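
The TestHAAdmin change is worth a gloss: -help now exits 0, and its text
apparently lands on stdout rather than stderr, so the test captures and
searches both streams. The capture idiom, sketched with an invented
stand-in for a tool whose PrintStream is injectable:

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import java.nio.charset.StandardCharsets;

    public class OutputCaptureSketch {
      // Stand-in for a CLI tool with an injectable output stream.
      static class MyTool {
        PrintStream out = System.out;
        int run(String... args) { out.println("Usage: mytool ..."); return 0; }
      }

      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
        MyTool tool = new MyTool();
        tool.out = new PrintStream(outBytes, true, "UTF-8");
        int ret = tool.run("-help");
        String output = new String(outBytes.toByteArray(),
            StandardCharsets.UTF_8);
        // Assert on the exit code and the captured text together.
        assert ret == 0 && output.contains("Usage");
      }
    }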

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java Fri Oct 19 02:25:55 2012
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.ha.HealthMonitor.Callback;
 import org.apache.hadoop.ha.HealthMonitor.State;
+import org.apache.hadoop.util.Time;
 
 import org.junit.Before;
 import org.junit.Test;
@@ -136,8 +137,8 @@ public class TestHealthMonitor {
 
   private void waitForState(HealthMonitor hm, State state)
       throws InterruptedException {
-    long st = System.currentTimeMillis();
-    while (System.currentTimeMillis() - st < 2000) {
+    long st = Time.now();
+    while (Time.now() - st < 2000) {
       if (hm.getHealthState() == state) {
         return;
       }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java Fri Oct 19 02:25:55 2012
@@ -28,6 +28,7 @@ import org.apache.hadoop.ha.HAServicePro
 import org.apache.hadoop.ha.HealthMonitor.State;
 import org.apache.hadoop.ha.MiniZKFCCluster.DummyZKFC;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.log4j.Level;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooKeeper;
@@ -394,9 +395,9 @@ public class TestZKFailoverController ex
       // Ask it to cede active for 3 seconds. It should respond promptly
       // (i.e. the RPC itself should not take 3 seconds!)
       ZKFCProtocol proxy = zkfc.getLocalTarget().getZKFCProxy(conf, 5000);
-      long st = System.currentTimeMillis();
+      long st = Time.now();
       proxy.cedeActive(3000);
-      long et = System.currentTimeMillis();
+      long et = Time.now();
       assertTrue("RPC to cedeActive took " + (et - st) + " ms",
           et - st < 1000);
       
@@ -408,7 +409,7 @@ public class TestZKFailoverController ex
       // After the prescribed 3 seconds, should go into STANDBY state,
       // since the other node in the cluster would have taken ACTIVE.
       cluster.waitForElectorState(0, ActiveStandbyElector.State.STANDBY);
-      long et2 = System.currentTimeMillis();
+      long et2 = Time.now();
       assertTrue("Should take ~3 seconds to rejoin. Only took " + (et2 - et) +
           "ms before rejoining.",
           et2 - et > 2800);      

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java Fri Oct 19 02:25:55 2012
@@ -21,6 +21,7 @@ import java.util.Random;
 
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Time;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -61,11 +62,11 @@ public class TestZKFailoverControllerStr
   @Test(timeout=(STRESS_RUNTIME_SECS + EXTRA_TIMEOUT_SECS) * 1000)
   public void testExpireBackAndForth() throws Exception {
     cluster.start();
-    long st = System.currentTimeMillis();
+    long st = Time.now();
     long runFor = STRESS_RUNTIME_SECS * 1000;
 
     int i = 0;
-    while (System.currentTimeMillis() - st < runFor) {
+    while (Time.now() - st < runFor) {
       // flip flop the services back and forth
       int from = i % 2;
       int to = (i + 1) % 2;
@@ -87,11 +88,11 @@ public class TestZKFailoverControllerStr
   @Test(timeout=(STRESS_RUNTIME_SECS + EXTRA_TIMEOUT_SECS) * 1000)
   public void testRandomExpirations() throws Exception {
     cluster.start();
-    long st = System.currentTimeMillis();
+    long st = Time.now();
     long runFor = STRESS_RUNTIME_SECS * 1000;
 
     Random r = new Random();
-    while (System.currentTimeMillis() - st < runFor) {
+    while (Time.now() - st < runFor) {
       cluster.getTestContext().checkException();
       int targetIdx = r.nextInt(2);
       ActiveStandbyElector target = cluster.getElector(targetIdx);
@@ -125,8 +126,8 @@ public class TestZKFailoverControllerStr
     // setting up the mock.
     cluster.start();
     
-    long st = System.currentTimeMillis();
-    while (System.currentTimeMillis() - st < runFor) {
+    long st = Time.now();
+    while (Time.now() - st < runFor) {
       cluster.getTestContext().checkException();
       serverFactory.closeAll();
       Thread.sleep(50);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java Fri Oct 19 02:25:55 2012
@@ -46,14 +46,17 @@ public class TestGlobalFilter extends Ht
   static public class RecordingFilter implements Filter {
     private FilterConfig filterConfig = null;
 
+    @Override
     public void init(FilterConfig filterConfig) {
       this.filterConfig = filterConfig;
     }
 
+    @Override
     public void destroy() {
       this.filterConfig = null;
     }
 
+    @Override
     public void doFilter(ServletRequest request, ServletResponse response,
         FilterChain chain) throws IOException, ServletException {
       if (filterConfig == null)
@@ -69,6 +72,7 @@ public class TestGlobalFilter extends Ht
     static public class Initializer extends FilterInitializer {
       public Initializer() {}
 
+      @Override
       public void initFilter(FilterContainer container, Configuration conf) {
         container.addGlobalFilter("recording", RecordingFilter.class.getName(), null);
       }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java Fri Oct 19 02:25:55 2012
@@ -46,14 +46,17 @@ public class TestPathFilter extends Http
   static public class RecordingFilter implements Filter {
     private FilterConfig filterConfig = null;
 
+    @Override
     public void init(FilterConfig filterConfig) {
       this.filterConfig = filterConfig;
     }
 
+    @Override
     public void destroy() {
       this.filterConfig = null;
     }
 
+    @Override
     public void doFilter(ServletRequest request, ServletResponse response,
         FilterChain chain) throws IOException, ServletException {
       if (filterConfig == null)
@@ -69,6 +72,7 @@ public class TestPathFilter extends Http
     static public class Initializer extends FilterInitializer {
       public Initializer() {}
 
+      @Override
       public void initFilter(FilterContainer container, Configuration conf) {
         container.addFilter("recording", RecordingFilter.class.getName(), null);
       }
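
TestGlobalFilter and TestPathFilter differ only in the registration
call: addGlobalFilter versus addFilter. The FilterContainer contract
appears to be that a global filter applies to everything the HttpServer
serves while addFilter scopes the filter to user-facing paths; treat
that distinction as an assumption here. The initializer shape,
mirroring the code above with an illustrative filter class name:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.FilterContainer;
    import org.apache.hadoop.http.FilterInitializer;

    public class RecordingInitializer extends FilterInitializer {
      @Override
      public void initFilter(FilterContainer container, Configuration conf) {
        // Swap in container.addFilter(...) to cover user-facing paths only.
        container.addGlobalFilter("recording",
            "my.pkg.RecordingFilter", null);
      }
    }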

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java Fri Oct 19 02:25:55 2012
@@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletReq
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
 public class TestServletFilter extends HttpServerFunctionalTest {
@@ -45,14 +46,17 @@ public class TestServletFilter extends H
   static public class SimpleFilter implements Filter {
     private FilterConfig filterConfig = null;
 
+    @Override
     public void init(FilterConfig filterConfig) throws ServletException {
       this.filterConfig = filterConfig;
     }
 
+    @Override
     public void destroy() {
       this.filterConfig = null;
     }
 
+    @Override
     public void doFilter(ServletRequest request, ServletResponse response,
         FilterChain chain) throws IOException, ServletException {
       if (filterConfig == null)
@@ -67,6 +71,7 @@ public class TestServletFilter extends H
     static public class Initializer extends FilterInitializer {
       public Initializer() {}
 
+      @Override
       public void initFilter(FilterContainer container, Configuration conf) {
         container.addFilter("simple", SimpleFilter.class.getName(), null);
       }
@@ -149,6 +154,7 @@ public class TestServletFilter extends H
       public Initializer() {
       }
 
+      @Override
       public void initFilter(FilterContainer container, Configuration conf) {
         container.addFilter("simple", ErrorFilter.class.getName(), null);
       }
@@ -158,7 +164,7 @@ public class TestServletFilter extends H
   @Test
   public void testServletFilterWhenInitThrowsException() throws Exception {
     Configuration conf = new Configuration();
-    // start a http server with CountingFilter
+    // start a http server with ErrorFilter
     conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
         ErrorFilter.Initializer.class.getName());
     HttpServer http = createTestServer(conf);
@@ -169,4 +175,25 @@ public class TestServletFilter extends H
       assertTrue( e.getMessage().contains("Problem in starting http server. Server handlers failed"));
     }
   }
+  
+  /**
+   * Similar to the above test case, except that it uses a different API to add the
+   * filter. Regression test for HADOOP-8786.
+   */
+  @Test
+  public void testContextSpecificServletFilterWhenInitThrowsException()
+      throws Exception {
+    Configuration conf = new Configuration();
+    HttpServer http = createTestServer(conf);
+    http.defineFilter(http.webAppContext,
+        "ErrorFilter", ErrorFilter.class.getName(),
+        null, null);
+    try {
+      http.start();
+      fail("expecting exception");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
+    }
+  }
+
 }
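
The new regression test for HADOOP-8786 registers the broken filter via
HttpServer#defineFilter on webAppContext instead of through a
FilterInitializer, so the init failure only surfaces when http.start()
throws. GenericTestUtils.assertExceptionContains then matches on the
message rather than the exception subtype; a hedged sketch of what such
a helper boils down to (not the committed implementation):

    import static org.junit.Assert.assertTrue;

    final class AssertSketch {
      // Fail, quoting the offending throwable, when the expected
      // message fragment is missing.
      static void assertExceptionContains(String fragment, Throwable t) {
        String msg = t.getMessage();
        assertTrue("expected '" + fragment + "' in: " + t,
            msg != null && msg.contains(fragment));
      }
    }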

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java Fri Oct 19 02:25:55 2012
@@ -18,12 +18,10 @@
 
 package org.apache.hadoop.io;
 
-import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.lang.reflect.Type;
 
 import org.apache.avro.Schema;
-import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.reflect.ReflectDatumWriter;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java Fri Oct 19 02:25:55 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.io;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Random;
 
 
@@ -40,11 +41,13 @@ public class RandomDatum implements Writ
     return length;
   }
   
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(length);
     out.write(data);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     length = in.readInt();
     if (data == null || length > data.length)
@@ -63,6 +66,11 @@ public class RandomDatum implements Writ
     return compareTo((RandomDatum)o) == 0;
   }
 
+  @Override
+  public int hashCode() {
+    return Arrays.hashCode(this.data);
+  }
+  
   private static final char[] HEX_DIGITS =
   {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
 
@@ -102,6 +110,7 @@ public class RandomDatum implements Writ
       super(RandomDatum.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       int n1 = readInt(b1, s1);
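
The RandomDatum hunk fixes a latent equals/hashCode contract violation:
the class overrides equals (delegating to compareTo) but previously
inherited Object.hashCode, so equal datums could hash into different
buckets. Arrays.hashCode derives the hash from the byte contents,
matching the equality definition. An invented minimal illustration:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class Datum {
      private final byte[] data;
      Datum(byte[] data) { this.data = data; }

      @Override
      public boolean equals(Object o) {
        return o instanceof Datum && Arrays.equals(data, ((Datum) o).data);
      }

      // Equal objects must report equal hash codes, or HashSet/HashMap
      // lookups silently fail.
      @Override
      public int hashCode() { return Arrays.hashCode(data); }

      public static void main(String[] args) {
        Set<Datum> set = new HashSet<Datum>();
        set.add(new Datum(new byte[] {1, 2}));
        assert set.contains(new Datum(new byte[] {1, 2}));
      }
    }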

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java Fri Oct 19 02:25:55 2012
@@ -19,17 +19,9 @@
 package org.apache.hadoop.io;
 
 import java.io.IOException;
-import java.io.ByteArrayOutputStream;
 import java.util.EnumSet;
 import java.lang.reflect.Type;
 
-import org.apache.avro.Schema;
-import org.apache.avro.reflect.ReflectData;
-import org.apache.avro.reflect.ReflectDatumWriter;
-import org.apache.avro.reflect.ReflectDatumReader;
-import org.apache.avro.io.BinaryEncoder;
-import org.apache.avro.io.DecoderFactory;
-
 import junit.framework.TestCase;
 
 /** Unit test for EnumSetWritable */

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java Fri Oct 19 02:25:55 2012
@@ -48,9 +48,11 @@ public class TestGenericWritable extends
   /** Dummy class for testing {@link GenericWritable} */
   public static class Foo implements Writable {
     private String foo = "foo";
+    @Override
     public void readFields(DataInput in) throws IOException {
       foo = Text.readString(in);
     }
+    @Override
     public void write(DataOutput out) throws IOException {
       Text.writeString(out, foo);
     }
@@ -65,15 +67,19 @@ public class TestGenericWritable extends
   public static class Bar implements Writable, Configurable {
     private int bar = 42; //The Answer to The Ultimate Question Of Life, the Universe and Everything
     private Configuration conf = null;
+    @Override
     public void readFields(DataInput in) throws IOException {
       bar = in.readInt();
     }
+    @Override
     public void write(DataOutput out) throws IOException {
       out.writeInt(bar);
     }
+    @Override
     public Configuration getConf() {
       return conf;
     }
+    @Override
     public void setConf(Configuration conf) {
       this.conf = conf;
     }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java Fri Oct 19 02:25:55 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.io;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
+import java.io.ByteArrayInputStream;
+import java.io.EOFException;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -65,6 +67,36 @@ public class TestIOUtils {
   }
 
   @Test
+  public void testCopyBytesShouldCloseInputSteamWhenOutputStreamCloseThrowsRunTimeException()
+      throws Exception {
+    InputStream inputStream = Mockito.mock(InputStream.class);
+    OutputStream outputStream = Mockito.mock(OutputStream.class);
+    Mockito.doReturn(-1).when(inputStream).read(new byte[1]);
+    Mockito.doThrow(new RuntimeException()).when(outputStream).close();
+    try {
+      IOUtils.copyBytes(inputStream, outputStream, 1, true);
+      fail("Didn't throw exception");
+    } catch (RuntimeException e) {
+    }
+    Mockito.verify(outputStream, Mockito.atLeastOnce()).close();
+  }
+
+  @Test
+  public void testCopyBytesShouldCloseInputSteamWhenInputStreamCloseThrowsRunTimeException()
+      throws Exception {
+    InputStream inputStream = Mockito.mock(InputStream.class);
+    OutputStream outputStream = Mockito.mock(OutputStream.class);
+    Mockito.doReturn(-1).when(inputStream).read(new byte[1]);
+    Mockito.doThrow(new RuntimeException()).when(inputStream).close();
+    try {
+      IOUtils.copyBytes(inputStream, outputStream, 1, true);
+      fail("Didn't throw exception");
+    } catch (RuntimeException e) {
+    }
+    Mockito.verify(inputStream, Mockito.atLeastOnce()).close();
+  }
+
+  @Test
   public void testCopyBytesShouldNotCloseStreamsWhenCloseIsFalse()
       throws Exception {
     InputStream inputStream = Mockito.mock(InputStream.class);
@@ -74,7 +106,7 @@ public class TestIOUtils {
     Mockito.verify(inputStream, Mockito.atMost(0)).close();
     Mockito.verify(outputStream, Mockito.atMost(0)).close();
   }
-  
+
   @Test
   public void testCopyBytesWithCountShouldCloseStreamsWhenCloseIsTrue()
       throws Exception {
@@ -115,7 +147,7 @@ public class TestIOUtils {
     Mockito.verify(inputStream, Mockito.atLeastOnce()).close();
     Mockito.verify(outputStream, Mockito.atLeastOnce()).close();
   }
-  
+
   @Test
   public void testWriteFully() throws IOException {
     final int INPUT_BUFFER_LEN = 10000;
@@ -146,6 +178,7 @@ public class TestIOUtils {
       for (int i = HALFWAY; i < input.length; i++) {
         assertEquals(input[i - HALFWAY], output[i]);
       }
+      raf.close();
     } finally {
       File f = new File(TEST_FILE_NAME);
       if (f.exists()) {
@@ -175,4 +208,41 @@ public class TestIOUtils {
           "Error while reading compressed data", ioe);
     }
   }
+
+  @Test
+  public void testSkipFully() throws IOException {
+    byte inArray[] = new byte[] {0, 1, 2, 3, 4};
+    ByteArrayInputStream in = new ByteArrayInputStream(inArray);
+    try {
+      in.mark(inArray.length);
+      IOUtils.skipFully(in, 2);
+      IOUtils.skipFully(in, 2);
+      try {
+        IOUtils.skipFully(in, 2);
+        fail("expected to get a PrematureEOFException");
+      } catch (EOFException e) {
+        assertEquals(e.getMessage(), "Premature EOF from inputStream " +
+            "after skipping 1 byte(s).");
+      }
+      in.reset();
+      try {
+        IOUtils.skipFully(in, 20);
+        fail("expected to get a PrematureEOFException");
+      } catch (EOFException e) {
+        assertEquals(e.getMessage(), "Premature EOF from inputStream " +
+            "after skipping 5 byte(s).");
+      }
+      in.reset();
+      IOUtils.skipFully(in, 5);
+      try {
+        IOUtils.skipFully(in, 10);
+        fail("expected to get a PrematureEOFException");
+      } catch (EOFException e) {
+        assertEquals(e.getMessage(), "Premature EOF from inputStream " +
+            "after skipping 0 byte(s).");
+      }
+    } finally {
+      in.close();
+    }
+  }
 }
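
The testSkipFully additions pin down the contract: skip exactly n bytes
or throw an EOFException reporting how many bytes were actually
skipped. A loop is unavoidable because InputStream.skip may legally
skip fewer bytes than requested even before end of stream. A hedged
sketch of the shape such a helper takes (not the committed
implementation):

    import java.io.EOFException;
    import java.io.IOException;
    import java.io.InputStream;

    final class SkipSketch {
      // Skip exactly len bytes or throw.
      static void skipFully(InputStream in, long len) throws IOException {
        while (len > 0) {
          long skipped = in.skip(len);
          if (skipped <= 0) {
            // skip() returning 0 is ambiguous; probe with a read to
            // distinguish "try again" from true end-of-stream.
            if (in.read() == -1) {
              throw new EOFException("Premature EOF, " + len
                  + " byte(s) left to skip");
            }
            skipped = 1;  // the probe consumed one byte
          }
          len -= skipped;
        }
      }
    }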

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java Fri Oct 19 02:25:55 2012
@@ -91,6 +91,7 @@ public class TestMD5Hash extends TestCas
                closeHash1.hashCode() != closeHash2.hashCode());
      
     Thread t1 = new Thread() {      
+      @Override
       public void run() {
         for (int i = 0; i < 100; i++) {
           MD5Hash hash = new MD5Hash(DFF);
@@ -100,6 +101,7 @@ public class TestMD5Hash extends TestCas
     };
     
     Thread t2 = new Thread() {
+      @Override
       public void run() {
         for (int i = 0; i < 100; i++) {
           MD5Hash hash = new MD5Hash(D00);