Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2017/08/25 16:56:12 UTC

[1/2] hadoop git commit: HADOOP-14729. Upgrade JUnit 3 test cases to JUnit 4. Contributed by Ajay Kumar.

Repository: hadoop
Updated Branches:
  refs/heads/trunk 3a4e86116 -> 8b7cbe384
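
For reference, every file below applies the same JUnit 3 to JUnit 4 migration: drop "extends TestCase", mark test methods with @Test, replace the setUp()/tearDown() overrides with @Before/@After, and move from junit.framework assertions to org.junit.Assert (usually via a static import). A minimal illustrative sketch of the pattern (the TestExample class here is hypothetical, not part of this patch):

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;
    import static org.junit.Assert.*;

    // JUnit 4 style: a plain class, no "extends TestCase".
    public class TestExample {

      private StringBuilder fixture;

      @Before  // replaces the JUnit 3 setUp() override
      public void setUp() {
        fixture = new StringBuilder("abc");
      }

      @After   // replaces the JUnit 3 tearDown() override
      public void tearDown() {
        fixture = null;
      }

      @Test    // replaces the JUnit 3 "public void testXxx()" naming convention
      public void testReverse() {
        assertEquals("cba", fixture.reverse().toString());
      }
    }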


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestCacheableIPList.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestCacheableIPList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestCacheableIPList.java
index 3289d78..88f3b69 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestCacheableIPList.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestCacheableIPList.java
@@ -18,14 +18,11 @@
 package org.apache.hadoop.util;
 
 import java.io.IOException;
+import org.junit.Test;
 
-import org.apache.hadoop.util.CacheableIPList;
-import org.apache.hadoop.util.FileBasedIPList;
+import static org.junit.Assert.*;
 
-
-import junit.framework.TestCase;
-
-public class TestCacheableIPList extends TestCase {
+public class TestCacheableIPList {
 
   /**
    * Add a bunch of subnets and IPSs to the file
@@ -37,6 +34,7 @@ public class TestCacheableIPList extends TestCase {
    * test for inclusion
    * Check for exclusion
    */
+  @Test
   public void testAddWithSleepForCacheTimeout() throws IOException, InterruptedException {
 
     String[] ips = {"10.119.103.112", "10.221.102.0/23", "10.113.221.221"};
@@ -76,6 +74,7 @@ public class TestCacheableIPList extends TestCase {
    * test for inclusion
    * Check for exclusion
    */
+  @Test
   public void testRemovalWithSleepForCacheTimeout() throws IOException, InterruptedException {
 
     String[] ips = {"10.119.103.112", "10.221.102.0/23",
@@ -115,6 +114,7 @@ public class TestCacheableIPList extends TestCase {
    * test for inclusion
    * Check for exclusion
    */
+  @Test
   public void testAddWithRefresh() throws IOException, InterruptedException {
 
     String[] ips = {"10.119.103.112", "10.221.102.0/23", "10.113.221.221"};
@@ -154,6 +154,7 @@ public class TestCacheableIPList extends TestCase {
    * test for inclusion
    * Check for exclusion
    */
+  @Test
   public void testRemovalWithRefresh() throws IOException, InterruptedException {
 
     String[] ips = {"10.119.103.112", "10.221.102.0/23",

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFileBasedIPList.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFileBasedIPList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFileBasedIPList.java
index 0e79fd1..1bb595c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFileBasedIPList.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFileBasedIPList.java
@@ -22,14 +22,11 @@ import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.util.FileBasedIPList;
-import org.apache.hadoop.util.IPList;
 import org.junit.After;
 import org.junit.Test;
+import static org.junit.Assert.*;
 
-import junit.framework.TestCase;
-
-public class TestFileBasedIPList extends TestCase {
+public class TestFileBasedIPList {
 
   @After
   public void tearDown() {
@@ -127,6 +124,7 @@ public class TestFileBasedIPList extends TestCase {
    * test for inclusion
    * should be true as if the feature is turned off
    */
+  @Test
   public void testFileNotSpecified() {
 
     IPList ipl = new FileBasedIPList(null);
@@ -140,6 +138,7 @@ public class TestFileBasedIPList extends TestCase {
    * test for inclusion
    * should be true as if the feature is turned off
    */
+  @Test
   public void testFileMissing() {
 
     IPList ipl = new FileBasedIPList("missingips.txt");
@@ -153,6 +152,7 @@ public class TestFileBasedIPList extends TestCase {
    * test for inclusion
    * should be true as if the feature is turned off
    */
+  @Test
   public void testWithEmptyList() throws IOException {
     String[] ips = {};
 
@@ -168,6 +168,7 @@ public class TestFileBasedIPList extends TestCase {
    * test for inclusion
    * should be true as if the feature is turned off
    */
+  @Test
   public void testForBadFIle() throws IOException {
     String[] ips = { "10.221.102/23"};
 
@@ -187,6 +188,7 @@ public class TestFileBasedIPList extends TestCase {
    * Check  for inclusion with good entries
    * Check for exclusion
    */
+  @Test
   public void testWithAWrongEntry() throws IOException {
 
     String[] ips = {"10.119.103.112", "10.221.102/23", "10.221.204.1/23"};

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java
index 3a4ebd5..8ba930b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.util;
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.apache.hadoop.util.FindClass;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericsUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericsUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericsUtil.java
index 58537ad..85d649c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericsUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericsUtil.java
@@ -21,12 +21,14 @@ package org.apache.hadoop.util;
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 
-public class TestGenericsUtil extends TestCase {
+public class TestGenericsUtil {
 
+  @Test
   public void testToArray() {
 
     //test a list of size 10
@@ -45,6 +47,7 @@ public class TestGenericsUtil extends TestCase {
     }
   }
 
+  @Test
   public void testWithEmptyList() {
     try {
       List<String> list = new ArrayList<String>();
@@ -57,6 +60,7 @@ public class TestGenericsUtil extends TestCase {
     }
   }
 
+  @Test
   public void testWithEmptyList2() {
     List<String> list = new ArrayList<String>();
     //this method should not throw IndexOutOfBoundsException
@@ -81,6 +85,7 @@ public class TestGenericsUtil extends TestCase {
     }
   }
 
+  @Test
   public void testWithGenericClass() {
 
     GenericClass<String> testSubject = new GenericClass<String>();
@@ -102,6 +107,7 @@ public class TestGenericsUtil extends TestCase {
 
   }
 
+  @Test
   public void testGenericOptionsParser() throws Exception {
      GenericOptionsParser parser = new GenericOptionsParser(
         new Configuration(), new String[] {"-jt"});
@@ -116,6 +122,7 @@ public class TestGenericsUtil extends TestCase {
             "y=z", parser.getConfiguration().get("x"));
   }
 
+  @Test
   public void testGetClass() {
 
     //test with Integer
@@ -131,6 +138,7 @@ public class TestGenericsUtil extends TestCase {
             GenericClass.class, c2);
   }
 
+  @Test
   public void testIsLog4jLogger() throws Exception {
     assertFalse("False if clazz is null", GenericsUtil.isLog4jLogger(null));
     assertTrue("The implementation is Log4j",

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIndexedSort.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIndexedSort.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIndexedSort.java
index 8f33c9d..3de0854 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIndexedSort.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIndexedSort.java
@@ -21,14 +21,15 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Random;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparator;
 
-public class TestIndexedSort extends TestCase {
+public class TestIndexedSort {
 
   public void sortAllEqual(IndexedSorter sorter) throws Exception {
     final int SAMPLE = 500;
@@ -128,6 +129,7 @@ public class TestIndexedSort extends TestCase {
   }
 
 
+  @Test
   public void testQuickSort() throws Exception {
     QuickSort sorter = new QuickSort();
     sortRandom(sorter);
@@ -158,6 +160,7 @@ public class TestIndexedSort extends TestCase {
     assertTrue(Arrays.equals(values, check));
   }
 
+  @Test
   public void testHeapSort() throws Exception {
     HeapSort sorter = new HeapSort();
     sortRandom(sorter);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeLibraryChecker.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeLibraryChecker.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeLibraryChecker.java
index 7589e5a..e4792dc 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeLibraryChecker.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeLibraryChecker.java
@@ -19,13 +19,13 @@ package org.apache.hadoop.util;
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
-
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.util.ExitUtil.ExitException;
-import org.junit.Test;
 
-public class TestNativeLibraryChecker extends TestCase {
+
+public class TestNativeLibraryChecker {
   private void expectExit(String [] args) {
     try {
       // should throw exit exception

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
index 341f38d..d1d392e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
@@ -28,7 +28,8 @@ import java.util.jar.JarOutputStream;
 import java.util.zip.ZipEntry;
 
 import org.junit.Assert;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -49,8 +50,6 @@ import org.apache.hadoop.mapreduce.TaskInputOutputContext;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.Test;
-
 /**
  * Tests the use of the
  * {@link org.apache.hadoop.mapreduce.filecache.DistributedCache} within the
@@ -66,7 +65,7 @@ import org.junit.Test;
  * This test is not fast: it uses MiniMRCluster.
  */
 @SuppressWarnings("deprecation")
-public class TestMRWithDistributedCache extends TestCase {
+public class TestMRWithDistributedCache {
   private static Path TEST_ROOT_DIR =
     new Path(System.getProperty("test.build.data","/tmp"));
   private static File symlinkFile = new File("distributed.first.symlink");
@@ -97,23 +96,23 @@ public class TestMRWithDistributedCache extends TestCase {
       FileSystem fs = LocalFileSystem.get(conf);
 
       // Check that 2 files and 2 archives are present
-      TestCase.assertEquals(2, localFiles.length);
-      TestCase.assertEquals(2, localArchives.length);
-      TestCase.assertEquals(2, files.length);
-      TestCase.assertEquals(2, archives.length);
+      Assert.assertEquals(2, localFiles.length);
+      Assert.assertEquals(2, localArchives.length);
+      Assert.assertEquals(2, files.length);
+      Assert.assertEquals(2, archives.length);
 
       // Check the file name
-      TestCase.assertTrue(files[0].getPath().endsWith("distributed.first"));
-      TestCase.assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
+      Assert.assertTrue(files[0].getPath().endsWith("distributed.first"));
+      Assert.assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
       
       // Check lengths of the files
-      TestCase.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
-      TestCase.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
+      Assert.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
+      Assert.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
 
       // Check extraction of the archive
-      TestCase.assertTrue(fs.exists(new Path(localArchives[0],
+      Assert.assertTrue(fs.exists(new Path(localArchives[0],
           "distributed.jar.inside3")));
-      TestCase.assertTrue(fs.exists(new Path(localArchives[1],
+      Assert.assertTrue(fs.exists(new Path(localArchives[1],
           "distributed.jar.inside4")));
 
       // Check the class loaders
@@ -121,18 +120,18 @@ public class TestMRWithDistributedCache extends TestCase {
       ClassLoader cl = Thread.currentThread().getContextClassLoader();
       // Both the file and the archive were added to classpath, so both
       // should be reachable via the class loader.
-      TestCase.assertNotNull(cl.getResource("distributed.jar.inside2"));
-      TestCase.assertNotNull(cl.getResource("distributed.jar.inside3"));
-      TestCase.assertNull(cl.getResource("distributed.jar.inside4"));
+      Assert.assertNotNull(cl.getResource("distributed.jar.inside2"));
+      Assert.assertNotNull(cl.getResource("distributed.jar.inside3"));
+      Assert.assertNull(cl.getResource("distributed.jar.inside4"));
 
       // Check that the symlink for the renaming was created in the cwd;
-      TestCase.assertTrue("symlink distributed.first.symlink doesn't exist",
+      Assert.assertTrue("symlink distributed.first.symlink doesn't exist",
           symlinkFile.exists());
-      TestCase.assertEquals("symlink distributed.first.symlink length not 1", 1,
+      Assert.assertEquals("symlink distributed.first.symlink length not 1", 1,
           symlinkFile.length());
       
       //This last one is a difference between MRv2 and MRv1
-      TestCase.assertTrue("second file should be symlinked too",
+      Assert.assertTrue("second file should be symlinked too",
           expectedAbsentSymlinkFile.exists());
     }
 
@@ -188,6 +187,7 @@ public class TestMRWithDistributedCache extends TestCase {
   }
 
   /** Tests using the local job runner. */
+  @Test
   public void testLocalJobRunner() throws Exception {
     symlinkFile.delete(); // ensure symlink is not present (e.g. if test is
                           // killed part way through)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
index e15f7ab..999561a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
@@ -23,7 +23,8 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.net.URI;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import org.junit.Assert;
 
 import org.apache.hadoop.fs.FileStatus;
@@ -38,7 +39,7 @@ import org.apache.hadoop.io.Text;
 
 
 @SuppressWarnings("unchecked")
-public class TestFileOutputCommitter extends TestCase {
+public class TestFileOutputCommitter {
   private static Path outDir = new Path(System.getProperty("test.build.data",
       "/tmp"), "output");
 
@@ -153,14 +154,18 @@ public class TestFileOutputCommitter extends TestCase {
     validateContent(outDir);
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
+
+  @Test
   public void testRecoveryV1() throws Exception {
     testRecoveryInternal(1, 1);
   }
 
+  @Test
   public void testRecoveryV2() throws Exception {
     testRecoveryInternal(2, 2);
   }
 
+  @Test
   public void testRecoveryUpgradeV1V2() throws Exception {
     testRecoveryInternal(1, 2);
   }
@@ -203,11 +208,13 @@ public class TestFileOutputCommitter extends TestCase {
     assert(dataFileFound && indexFileFound);
   }
 
+  @Test
   public void testCommitterWithFailureV1() throws Exception {
     testCommitterWithFailureInternal(1, 1);
     testCommitterWithFailureInternal(1, 2);
   }
 
+  @Test
   public void testCommitterWithFailureV2() throws Exception {
     testCommitterWithFailureInternal(2, 1);
     testCommitterWithFailureInternal(2, 2);
@@ -256,10 +263,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testCommitterWithDuplicatedCommitV1() throws Exception {
     testCommitterWithDuplicatedCommitInternal(1);
   }
 
+  @Test
   public void testCommitterWithDuplicatedCommitV2() throws Exception {
     testCommitterWithDuplicatedCommitInternal(2);
   }
@@ -340,10 +349,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testCommitterV1() throws Exception {
     testCommitterInternal(1);
   }
 
+  @Test
   public void testCommitterV2() throws Exception {
     testCommitterInternal(2);
   }
@@ -380,18 +391,22 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testMapFileOutputCommitterV1() throws Exception {
     testMapFileOutputCommitterInternal(1);
   }
 
+  @Test
   public void testMapFileOutputCommitterV2() throws Exception {
     testMapFileOutputCommitterInternal(2);
   }
 
+  @Test
   public void testMapOnlyNoOutputV1() throws Exception {
     testMapOnlyNoOutputInternal(1);
   }
 
+  @Test
   public void testMapOnlyNoOutputV2() throws Exception {
     testMapOnlyNoOutputInternal(2);
   }
@@ -456,10 +471,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(out);
   }
 
+  @Test
   public void testAbortV1() throws Exception {
     testAbortInternal(1);
   }
 
+  @Test
   public void testAbortV2() throws Exception {
     testAbortInternal(2);
   }
@@ -537,10 +554,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testFailAbortV1() throws Exception {
     testFailAbortInternal(1);
   }
 
+  @Test
   public void testFailAbortV2() throws Exception {
     testFailAbortInternal(2);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java
index b6a2df0..0cc3c66 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java
@@ -32,14 +32,16 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestIndexCache extends TestCase {
+public class TestIndexCache {
   private JobConf conf;
   private FileSystem fs;
   private Path p;
 
-  @Override
+  @Before
   public void setUp() throws IOException {
     conf = new JobConf();
     fs = FileSystem.getLocal(conf).getRaw();
@@ -47,6 +49,7 @@ public class TestIndexCache extends TestCase {
         "cache").makeQualified(fs.getUri(), fs.getWorkingDirectory());
   }
 
+  @Test
   public void testLRCPolicy() throws Exception {
     Random r = new Random();
     long seed = r.nextLong();
@@ -120,6 +123,7 @@ public class TestIndexCache extends TestCase {
     checkRecord(rec, totalsize);
   }
 
+  @Test
   public void testBadIndex() throws Exception {
     final int parts = 30;
     fs.delete(p, true);
@@ -152,6 +156,7 @@ public class TestIndexCache extends TestCase {
     }
   }
 
+  @Test
   public void testInvalidReduceNumberOrLength() throws Exception {
     fs.delete(p, true);
     conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
@@ -192,6 +197,7 @@ public class TestIndexCache extends TestCase {
     }
   }
 
+  @Test
   public void testRemoveMap() throws Exception {
     // This test case use two thread to call getIndexInformation and 
     // removeMap concurrently, in order to construct race condition.
@@ -241,7 +247,8 @@ public class TestIndexCache extends TestCase {
       assertEquals(true, cache.checkTotalMemoryUsed());
     }      
   }
-  
+
+  @Test
   public void testCreateRace() throws Exception {
     fs.delete(p, true);
     conf.setInt(TTConfig.TT_INDEX_CACHE, 1);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java
index 7d3e2ed..75893f5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobEndNotifier.java
@@ -31,12 +31,15 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
+import org.junit.Test;
 
-public class TestJobEndNotifier extends TestCase {
+public class TestJobEndNotifier {
   HttpServer2 server;
   URL baseUrl;
 
@@ -99,6 +102,7 @@ public class TestJobEndNotifier extends TestCase {
     }
   }
 
+  @Before
   public void setUp() throws Exception {
     new File(System.getProperty("build.webapps", "build/webapps") + "/test"
         ).mkdirs();
@@ -118,6 +122,7 @@ public class TestJobEndNotifier extends TestCase {
     FailServlet.calledTimes = 0;
   }
 
+  @After
   public void tearDown() throws Exception {
     server.stop();
   }
@@ -125,6 +130,7 @@ public class TestJobEndNotifier extends TestCase {
   /**
    * Basic validation for localRunnerNotification.
    */
+  @Test
   public void testLocalJobRunnerUriSubstitution() throws InterruptedException {
     JobStatus jobStatus = createTestJobStatus(
         "job_20130313155005308_0001", JobStatus.SUCCEEDED);
@@ -145,6 +151,7 @@ public class TestJobEndNotifier extends TestCase {
   /**
    * Validate job.end.retry.attempts for the localJobRunner.
    */
+  @Test
   public void testLocalJobRunnerRetryCount() throws InterruptedException {
     int retryAttempts = 3;
     JobStatus jobStatus = createTestJobStatus(
@@ -161,6 +168,7 @@ public class TestJobEndNotifier extends TestCase {
    * Validate that the notification times out after reaching
    * mapreduce.job.end-notification.timeout.
    */
+  @Test
   public void testNotificationTimeout() throws InterruptedException {
     Configuration conf = new Configuration();
     // Reduce the timeout to 1 second

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
index 1b533e7..fabe5f2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
@@ -33,7 +33,9 @@ import java.io.IOException;
 import java.io.LineNumberReader;
 import java.io.StringReader;
 
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.TaskReport;
@@ -43,8 +45,6 @@ import org.apache.log4j.Layout;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.apache.log4j.WriterAppender;
-import org.junit.Before;
-import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -53,7 +53,7 @@ import org.mockito.stubbing.Answer;
  * job monitoring is correct and prints 100% for map and reduce before 
  * successful completion.
  */
-public class TestJobMonitorAndPrint extends TestCase {
+public class TestJobMonitorAndPrint {
   private Job job;
   private Configuration conf;
   private ClientProtocol clientProtocol;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
index 20d8ab5..abbfcb2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
@@ -27,7 +27,10 @@ import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.junit.Assert;
 
@@ -55,7 +58,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 
 @SuppressWarnings("unchecked")
-public class TestFileOutputCommitter extends TestCase {
+public class TestFileOutputCommitter {
   private static final Path outDir = new Path(
       System.getProperty("test.build.data",
           System.getProperty("java.io.tmpdir")),
@@ -87,12 +90,12 @@ public class TestFileOutputCommitter extends TestCase {
     fs.delete(outDir, true);
   }
   
-  @Override
+  @Before
   public void setUp() throws IOException {
     cleanup();
   }
-  
-  @Override
+
+  @After
   public void tearDown() throws IOException {
     cleanup();
   }
@@ -195,14 +198,17 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testRecoveryV1() throws Exception {
     testRecoveryInternal(1, 1);
   }
 
+  @Test
   public void testRecoveryV2() throws Exception {
     testRecoveryInternal(2, 2);
   }
 
+  @Test
   public void testRecoveryUpgradeV1V2() throws Exception {
     testRecoveryInternal(1, 2);
   }
@@ -278,18 +284,22 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testCommitterV1() throws Exception {
     testCommitterInternal(1);
   }
 
+  @Test
   public void testCommitterV2() throws Exception {
     testCommitterInternal(2);
   }
-  
+
+  @Test
   public void testCommitterWithDuplicatedCommitV1() throws Exception {
     testCommitterWithDuplicatedCommitInternal(1);
   }
 
+  @Test
   public void testCommitterWithDuplicatedCommitV2() throws Exception {
     testCommitterWithDuplicatedCommitInternal(2);
   }
@@ -336,11 +346,13 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testCommitterWithFailureV1() throws Exception {
     testCommitterWithFailureInternal(1, 1);
     testCommitterWithFailureInternal(1, 2);
   }
 
+  @Test
   public void testCommitterWithFailureV2() throws Exception {
     testCommitterWithFailureInternal(2, 1);
     testCommitterWithFailureInternal(2, 2);
@@ -390,10 +402,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testCommitterRepeatableV1() throws Exception {
     testCommitterRetryInternal(1);
   }
 
+  @Test
   public void testCommitterRepeatableV2() throws Exception {
     testCommitterRetryInternal(2);
   }
@@ -493,14 +507,17 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testMapFileOutputCommitterV1() throws Exception {
     testMapFileOutputCommitterInternal(1);
   }
-  
+
+  @Test
   public void testMapFileOutputCommitterV2() throws Exception {
     testMapFileOutputCommitterInternal(2);
   }
 
+  @Test
   public void testInvalidVersionNumber() throws IOException {
     Job job = Job.getInstance();
     FileOutputFormat.setOutputPath(job, outDir);
@@ -552,10 +569,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testAbortV1() throws IOException, InterruptedException {
     testAbortInternal(1);
   }
 
+  @Test
   public void testAbortV2() throws IOException, InterruptedException {
     testAbortInternal(2);
   }
@@ -575,7 +594,7 @@ public class TestFileOutputCommitter extends TestCase {
     }
   }
 
-  
+
   private void testFailAbortInternal(int version)
       throws IOException, InterruptedException {
     Job job = Job.getInstance();
@@ -631,10 +650,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testFailAbortV1() throws Exception {
     testFailAbortInternal(1);
   }
 
+  @Test
   public void testFailAbortV2() throws Exception {
     testFailAbortInternal(2);
   }
@@ -732,10 +753,12 @@ public class TestFileOutputCommitter extends TestCase {
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
+  @Test
   public void testConcurrentCommitTaskWithSubDirV1() throws Exception {
     testConcurrentCommitTaskWithSubDir(1);
   }
 
+  @Test
   public void testConcurrentCommitTaskWithSubDirV2() throws Exception {
     testConcurrentCommitTaskWithSubDir(2);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputFormat.java
index a48fe3b..a5a8eb7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputFormat.java
@@ -19,7 +19,8 @@
 package org.apache.hadoop.mapreduce.lib.output;
 
 import java.io.IOException;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -28,8 +29,9 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-public class TestFileOutputFormat extends TestCase {
+public class TestFileOutputFormat {
 
+  @Test
   public void testSetOutputPathException() throws Exception {
     Job job = Job.getInstance();
     try {
@@ -42,6 +44,7 @@ public class TestFileOutputFormat extends TestCase {
     }
   }
 
+  @Test
   public void testCheckOutputSpecsException() throws Exception {
     Job job = Job.getInstance();
     Path outDir = new Path(System.getProperty("test.build.data", "/tmp"),

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java
index 768448f..96954d5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java
@@ -38,7 +38,7 @@ import org.junit.Test;
  * This class performs unit test for Job/JobControl classes.
  *  
  */
-public class TestJobControl extends junit.framework.TestCase {
+public class TestJobControl {
 
   /**
    * This is a main function for testing JobControl class.
@@ -263,13 +263,13 @@ public class TestJobControl extends junit.framework.TestCase {
     JobConf jc = new JobConf();
     Job j = new Job(jc);
     //Just make sure no exception is thrown
-    assertNull(j.getAssignedJobID());
+    Assert.assertNull(j.getAssignedJobID());
     org.apache.hadoop.mapreduce.Job mockjob = mock(org.apache.hadoop.mapreduce.Job.class);
     org.apache.hadoop.mapreduce.JobID jid = new org.apache.hadoop.mapreduce.JobID("test",0);
     when(mockjob.getJobID()).thenReturn(jid);
     j.setJob(mockjob);
     JobID expected = new JobID("test",0);
-    assertEquals(expected, j.getAssignedJobID());
+    Assert.assertEquals(expected, j.getAssignedJobID());
     verify(mockjob).getJobID();
   }
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapCollection.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapCollection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapCollection.java
index ecc01db..afe4a10 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapCollection.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMapCollection.java
@@ -17,9 +17,6 @@
  */
 
 package org.apache.hadoop.mapreduce;
-
-import junit.framework.TestCase;
-
 import java.io.IOException;
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -27,9 +24,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
-
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import static org.junit.Assert.*;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestDelegatingInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestDelegatingInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestDelegatingInputFormat.java
index 1428e47..194cdeb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestDelegatingInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestDelegatingInputFormat.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.mapreduce.lib.input;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.List;
-
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -30,9 +30,10 @@ import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 
-public class TestDelegatingInputFormat extends TestCase {
+public class TestDelegatingInputFormat {
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testSplitting() throws Exception {
     Job job = Job.getInstance();
     MiniDFSCluster dfs = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java
index d86ddd0..da011a2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/jobcontrol/TestMapReduceJobControl.java
@@ -95,7 +95,7 @@ public class TestMapReduceJobControl extends HadoopTestCase {
     cjob2 = new ControlledJob(job2, dependingJobs);
 
     Job job3 = MapReduceTestUtil.createCopyJob(conf, outdir_3, 
-	                                   outdir_1, outdir_2);
+                                     outdir_1, outdir_2);
     dependingJobs = new ArrayList<ControlledJob>();
     dependingJobs.add(cjob1);
     dependingJobs.add(cjob2);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java
index ae06812..14f123a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java
@@ -21,7 +21,10 @@ package org.apache.hadoop.mapreduce.lib.output;
 import java.io.*;
 import java.net.URI;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
@@ -38,7 +41,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 
 
-public class TestMRCJCFileOutputCommitter extends TestCase {
+public class TestMRCJCFileOutputCommitter {
   private static Path outDir = new Path(System.getProperty("test.build.data",
       "/tmp"), "output");
 
@@ -76,17 +79,18 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
     fs.delete(outDir, true);
   }
   
-  @Override
+  @Before
   public void setUp() throws IOException {
     cleanup();
   }
   
-  @Override
+  @After
   public void tearDown() throws IOException {
     cleanup();
   }
   
   @SuppressWarnings("unchecked")
+  @Test
   public void testCommitter() throws Exception {
     Job job = Job.getInstance();
     FileOutputFormat.setOutputPath(job, outDir);
@@ -122,7 +126,8 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
     assertEquals(output, expectedOutput.toString());
     FileUtil.fullyDelete(new File(outDir.toString()));
   }
-  
+
+  @Test
   public void testEmptyOutput() throws Exception {
     Job job = Job.getInstance();
     FileOutputFormat.setOutputPath(job, outDir);
@@ -146,6 +151,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
   }
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testAbort() throws IOException, InterruptedException {
     Job job = Job.getInstance();
     FileOutputFormat.setOutputPath(job, outDir);
@@ -195,6 +201,7 @@ public class TestMRCJCFileOutputCommitter extends TestCase {
   }
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testFailAbort() throws IOException, InterruptedException {
     Job job = Job.getInstance();
     Configuration conf = job.getConfiguration();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
index 508ded3..471c68f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
@@ -22,23 +22,30 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import org.junit.Assert;
 
-public class TestTaskContext extends TestCase {
-  
+public class TestTaskContext {
+
+  @Test
   public void testTaskContext() {
-    TaskContext context = new TaskContext(null, null, null, null, null, null, null);
+    TaskContext context = new TaskContext(null, null, null, null, null, null,
+        null);
     
     context.setInputKeyClass(IntWritable.class);
-    assertEquals(IntWritable.class.getName(), context.getInputKeyClass().getName());
+    Assert.assertEquals(IntWritable.class.getName(), context.getInputKeyClass
+        ().getName());
  
     context.setInputValueClass(Text.class);
-    assertEquals(Text.class.getName(), context.getInputValueClass().getName()); 
+    Assert.assertEquals(Text.class.getName(), context.getInputValueClass()
+        .getName());
    
     context.setOutputKeyClass(LongWritable.class);
-    assertEquals(LongWritable.class.getName(), context.getOutputKeyClass().getName()); 
+    Assert.assertEquals(LongWritable.class.getName(), context
+        .getOutputKeyClass().getName());
 
     context.setOutputValueClass(FloatWritable.class);
-    assertEquals(FloatWritable.class.getName(), context.getOutputValueClass().getName()); 
+    Assert.assertEquals(FloatWritable.class.getName(), context
+        .getOutputValueClass().getName());
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestInputBuffer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestInputBuffer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestInputBuffer.java
index 7eb6467..fa68364 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestInputBuffer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestInputBuffer.java
@@ -19,11 +19,12 @@ package org.apache.hadoop.mapred.nativetask.buffer;
 
 import java.io.IOException;
 
-import junit.framework.TestCase;
-
+import org.junit.Test;
 import org.junit.Assert;
 
-public class TestInputBuffer extends TestCase {
+public class TestInputBuffer {
+
+  @Test
   public void testInputBuffer() throws IOException {
     final int size = 100;
     final InputBuffer input1 = new InputBuffer(BufferType.DIRECT_BUFFER, size);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestOutputBuffer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestOutputBuffer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestOutputBuffer.java
index 39c25a6..af6693e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestOutputBuffer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestOutputBuffer.java
@@ -17,11 +17,12 @@
  */
 package org.apache.hadoop.mapred.nativetask.buffer;
 
-import junit.framework.TestCase;
-
+import org.junit.Test;
 import org.junit.Assert;
 
-public class TestOutputBuffer extends TestCase {
+public class TestOutputBuffer {
+
+  @Test
   public void testOutputBuffer() {
     final int size = 100;
     final OutputBuffer output1 = new OutputBuffer(BufferType.DIRECT_BUFFER, size);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
index fd5b100..1a7dace 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
@@ -20,7 +20,8 @@ package org.apache.hadoop.mapred.nativetask.serde;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
 
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.nativetask.Constants;
@@ -30,12 +31,11 @@ import org.apache.hadoop.mapred.nativetask.testutil.TestInput;
 import org.apache.hadoop.mapred.nativetask.testutil.TestInput.KV;
 import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
 import org.junit.Assert;
-import org.junit.Before;
 import org.mockito.Matchers;
 import org.mockito.Mockito;
 
 @SuppressWarnings({ "rawtypes", "unchecked" })
-public class TestKVSerializer extends TestCase {
+public class TestKVSerializer {
 
   int inputArraySize = 1000; // 1000 bytesWriable elements
   int bufferSize = 100; // bytes
@@ -46,7 +46,6 @@ public class TestKVSerializer extends TestCase {
   private SizedWritable value;
   private KVSerializer serializer;
 
-  @Override
   @Before
   public void setUp() throws IOException {
     this.inputArray = TestInput.getMapInputs(inputArraySize);
@@ -60,6 +59,7 @@ public class TestKVSerializer extends TestCase {
     serializer.updateLength(key, value);
   }
 
+  @Test
   public void testUpdateLength() throws IOException {
     Mockito.mock(DataOutputStream.class);
 
@@ -75,6 +75,7 @@ public class TestKVSerializer extends TestCase {
     }
   }
 
+  @Test
   public void testSerializeKV() throws IOException {
     final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);
 
@@ -92,6 +93,7 @@ public class TestKVSerializer extends TestCase {
     Assert.assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH);
   }
 
+  @Test
   public void testSerializeNoFlush() throws IOException {
     final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);
 
@@ -109,6 +111,7 @@ public class TestKVSerializer extends TestCase {
     Assert.assertEquals(written, key.length + value.length + Constants.SIZEOF_KV_LENGTH);
   }
 
+  @Test
   public void testSerializePartitionKV() throws IOException {
     final DataOutputStream dataOut = Mockito.mock(DataOutputStream.class);
 
@@ -130,12 +133,14 @@ public class TestKVSerializer extends TestCase {
         + Constants.SIZEOF_PARTITION_LENGTH);
   }
 
+  @Test
   public void testDeserializerNoData() throws IOException {
     final DataInputStream in = Mockito.mock(DataInputStream.class);
     Mockito.when(in.hasUnReadData()).thenReturn(false);
     Assert.assertEquals(0, serializer.deserializeKV(in, key, value));
   }
 
+  @Test
   public void testDeserializer() throws IOException {
     final DataInputStream in = Mockito.mock(DataInputStream.class);
     Mockito.when(in.hasUnReadData()).thenReturn(true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestReadWriteBuffer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestReadWriteBuffer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestReadWriteBuffer.java
index 6ea8092..584aedd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestReadWriteBuffer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestReadWriteBuffer.java
@@ -17,15 +17,16 @@
  */
 package org.apache.hadoop.mapred.nativetask.utils;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import org.junit.Assert;
 
 import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
-import org.junit.Assert;
 
-public class TestReadWriteBuffer extends TestCase {
+public class TestReadWriteBuffer {
 
   private static byte[] bytes = new byte[] { '0', 'a', 'b', 'c', 'd', '9' };
 
+  @Test
   public void testReadWriteBuffer() {
 
     final ReadWriteBuffer buffer = new ReadWriteBuffer();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestSizedWritable.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestSizedWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestSizedWritable.java
index 7b82eff..a6e43ed 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestSizedWritable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/utils/TestSizedWritable.java
@@ -17,15 +17,16 @@
  */
 package org.apache.hadoop.mapred.nativetask.utils;
 
-import junit.framework.TestCase;
+import org.junit.Test;
 
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.nativetask.util.SizedWritable;
 import org.junit.Assert;
 
 @SuppressWarnings({ "rawtypes", "unchecked" })
-public class TestSizedWritable extends TestCase {
+public class TestSizedWritable {
 
+  @Test
   public void testSizedWritable() {
     final SizedWritable w = new SizedWritable(BytesWritable.class);
     Assert.assertTrue(w.length == SizedWritable.INVALID_LENGTH);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestBaileyBorweinPlouffe.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestBaileyBorweinPlouffe.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestBaileyBorweinPlouffe.java
index 3215bfe..2df2df0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestBaileyBorweinPlouffe.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestBaileyBorweinPlouffe.java
@@ -18,29 +18,35 @@
 package org.apache.hadoop.examples;
 
 import java.math.BigInteger;
+import org.junit.Test;
+import org.junit.Assert;
 
 /** Tests for BaileyBorweinPlouffe */
-public class TestBaileyBorweinPlouffe extends junit.framework.TestCase {
+public class TestBaileyBorweinPlouffe {
 
+  @Test
   public void testMod() {
     final BigInteger TWO = BigInteger.ONE.add(BigInteger.ONE);
     for(long n = 3; n < 100; n++) {
       for (long e = 1; e < 100; e++) {
         final long r = TWO.modPow(
             BigInteger.valueOf(e), BigInteger.valueOf(n)).longValue();
-        assertEquals("e=" + e + ", n=" + n, r, BaileyBorweinPlouffe.mod(e, n));
+        Assert.assertEquals("e=" + e + ", n=" + n, r, BaileyBorweinPlouffe
+            .mod(e, n));
       }
     }
   }
 
+  @Test
   public void testHexDigit() {
     final long[] answers = {0x43F6, 0xA308, 0x29B7, 0x49F1, 0x8AC8, 0x35EA};
     long d = 1;
     for(int i = 0; i < answers.length; i++) {
-      assertEquals("d=" + d, answers[i], BaileyBorweinPlouffe.hexDigits(d));
+      Assert.assertEquals("d=" + d, answers[i], BaileyBorweinPlouffe
+          .hexDigits(d));
       d *= 10;
     }
 
-    assertEquals(0x243FL, BaileyBorweinPlouffe.hexDigits(0));
+    Assert.assertEquals(0x243FL, BaileyBorweinPlouffe.hexDigits(0));
  }
 }
\ No newline at end of file
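
Once a class stops extending junit.framework.TestCase, the inherited assertion methods are gone, which is why the calls above become Assert.assertEquals(...). The patch uses two equivalent styles for getting the assertions back: qualifying each call through org.junit.Assert (as here) or pulling the methods in with a static import (as in TestCacheableIPList). A small sketch of the static-import form, using a hypothetical test not taken from the commit:

  import static org.junit.Assert.assertEquals;  // alternative: import org.junit.Assert; and write Assert.assertEquals(...)
  import org.junit.Test;

  public class AssertImportStyleExample {

    @Test
    public void testSquare() {
      // message-first signature: assertEquals(String message, long expected, long actual)
      assertEquals("square of 3", 9L, 3L * 3L);
    }
  }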

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestLongLong.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestLongLong.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestLongLong.java
index 991121f..d6f284e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestLongLong.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestLongLong.java
@@ -19,24 +19,30 @@ package org.apache.hadoop.examples.pi.math;
 
 import java.math.BigInteger;
 import java.util.Random;
+import org.junit.Test;
+import org.junit.Assert;
 
-public class TestLongLong extends junit.framework.TestCase {
-  static final Random RAN = new Random(); 
+public class TestLongLong {
+
+  static final Random RAN = new Random();
   static final long MASK = (1L << (LongLong.SIZE >> 1)) - 1;
 
   static long nextPositiveLong() {
     return RAN.nextLong() & MASK;
   }
-  
+
   static void verifyMultiplication(long a, long b) {
     final LongLong ll = LongLong.multiplication(new LongLong(), a, b);
     final BigInteger bi = BigInteger.valueOf(a).multiply(BigInteger.valueOf(b));
 
-    final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
+    final String s = String.format(
+        "\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
+        b);
     //System.out.println(s);
-    assertEquals(s, bi, ll.toBigInteger());
+    Assert.assertEquals(s, bi, ll.toBigInteger());
   }
 
+  @Test
   public void testMultiplication() {
     for(int i = 0; i < 100; i++) {
       final long a = nextPositiveLong();
@@ -50,19 +56,24 @@ public class TestLongLong extends junit.framework.TestCase {
   static void verifyRightShift(long a, long b) {
     final LongLong ll = new LongLong().set(a, b);
     final BigInteger bi = ll.toBigInteger();
-    
-    for(int i = 0; i < LongLong.SIZE >> 1; i++) {
+
+    for (int i = 0; i < LongLong.SIZE >> 1; i++) {
       final long result = ll.shiftRight(i) & MASK;
       final long expected = bi.shiftRight(i).longValue() & MASK;
-      final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
-      assertEquals(s, expected, result);
+      final String s = String.format(
+          "\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
+          b);
+      Assert.assertEquals(s, expected, result);
     }
 
-    final String s = String.format("\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a, b);
+    final String s = String.format(
+        "\na = %x\nb = %x\nll= " + ll + "\nbi= " + bi.toString(16) + "\n", a,
+        b);
     //System.out.println(s);
-    assertEquals(s, bi, ll.toBigInteger());
+    Assert.assertEquals(s, bi, ll.toBigInteger());
   }
 
+  @Test
   public void testRightShift() {
     for(int i = 0; i < 1000; i++) {
       final long a = nextPositiveLong();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestModular.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestModular.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestModular.java
index 079eb7f..a75ec29 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestModular.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestModular.java
@@ -21,14 +21,16 @@ import java.math.BigInteger;
 import java.util.Random;
 
 import org.apache.hadoop.examples.pi.Util.Timer;
+import org.junit.Assert;
+import org.junit.Test;
 
-public class TestModular extends junit.framework.TestCase { 
-  private static final Random RANDOM = new Random(); 
+public class TestModular {
+  private static final Random RANDOM = new Random();
   private static final BigInteger TWO = BigInteger.valueOf(2);
 
 
   static final int DIV_VALID_BIT = 32;
-  static final long DIV_LIMIT = 1L << DIV_VALID_BIT; 
+  static final long DIV_LIMIT = 1L << DIV_VALID_BIT;
 
   // return r/n for n > r > 0
   static long div(long sum, long r, long n) {
@@ -36,7 +38,7 @@ public class TestModular extends junit.framework.TestCase {
     int i = DIV_VALID_BIT - 1;
     for(r <<= 1; r < n; r <<= 1) i--;
 //System.out.printf("  r=%d, n=%d, q=%d\n", r, n, q);
-    
+
     for(; i >= 0 ;) {
       r -= n;
       q |= (1L << i);
@@ -48,14 +50,15 @@ public class TestModular extends junit.framework.TestCase {
     sum += q;
     return sum < DIV_LIMIT? sum: sum - DIV_LIMIT;
   }
- 
+
+  @Test
   public void testDiv() {
     for(long n = 2; n < 100; n++)
       for(long r = 1; r < n; r++) {
         final long a = div(0, r, n);
         final long b = (long)((r*1.0/n) * (1L << DIV_VALID_BIT));
         final String s = String.format("r=%d, n=%d, a=%X, b=%X", r, n, a, b);
-        assertEquals(s, b, a);
+        Assert.assertEquals(s, b, a);
       }
   }
 
@@ -64,16 +67,16 @@ public class TestModular extends junit.framework.TestCase {
 
     for(int i = 0; i < rn.length; i++) {
       rn[i] = new long[rsize + 1][];
-      long n = RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL; 
+      long n = RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL;
       if (n <= 1) n = 0xFFFFFFFFFFFFFFFL - n;
       rn[i][0] = new long[]{n};
-      final BigInteger N = BigInteger.valueOf(n); 
+      final BigInteger N = BigInteger.valueOf(n);
 
       for(int j = 1; j < rn[i].length; j++) {
         long r = RANDOM.nextLong();
         if (r < 0) r = -r;
         if (r >= n) r %= n;
-        final BigInteger R = BigInteger.valueOf(r); 
+        final BigInteger R = BigInteger.valueOf(r);
         rn[i][j] = new long[]{r, R.multiply(R).mod(N).longValue()};
       }
     }
@@ -102,20 +105,20 @@ public class TestModular extends junit.framework.TestCase {
     } else {
       final int HALF = (63 - Long.numberOfLeadingZeros(n)) >> 1;
       final int FULL = HALF << 1;
-      final long ONES = (1 << HALF) - 1; 
-  
+      final long ONES = (1 << HALF) - 1;
+
       final long high = r >>> HALF;
       final long low  = r &= ONES;
 
       r *= r;
       if (r >= n) r %= n;
-  
+
       if (high != 0) {
         long s = high * high;
         if (s >= n) s %= n;
         for(int i = 0; i < FULL; i++)
           if ((s <<= 1) >= n) s -= n;
-        
+
         if (low == 0)
           r = s;
         else {
@@ -123,7 +126,7 @@ public class TestModular extends junit.framework.TestCase {
           if (t >= n) t %= n;
           for(int i = -1; i < HALF; i++)
             if ((t <<= 1) >= n) t -= n;
-          
+
           r += s;
           if (r >= n) r -= n;
           r += t;
@@ -133,7 +136,7 @@ public class TestModular extends junit.framework.TestCase {
     }
     return r;
   }
-  
+
   static void squareBenchmarks() {
     final Timer t = new Timer(false);
     t.tick("squareBenchmarks(), MAX_SQRT=" + Modular.MAX_SQRT_LONG);
@@ -147,8 +150,11 @@ public class TestModular extends junit.framework.TestCase {
         final long r = rn[i][j][0];
         final long answer = rn[i][j][1];
         final long s = square_slow(r, n);
-        if (s != answer)
-          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("square_slow");
@@ -161,8 +167,11 @@ public class TestModular extends junit.framework.TestCase {
         final long r = rn[i][j][0];
         final long answer = rn[i][j][1];
         final long s = square(r, n, r2p64);
-        if (s != answer)
-          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("square");
@@ -175,8 +184,11 @@ public class TestModular extends junit.framework.TestCase {
         final long answer = rn[i][j][1];
         final BigInteger R = BigInteger.valueOf(r);
         final long s = R.multiply(R).mod(N).longValue();
-        if (s != answer)
-          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("R.multiply(R).mod(N)");
@@ -189,8 +201,11 @@ public class TestModular extends junit.framework.TestCase {
         final long answer = rn[i][j][1];
         final BigInteger R = BigInteger.valueOf(r);
         final long s = R.modPow(TWO, N).longValue();
-        if (s != answer)
-          assertEquals("r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "r=" + r + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("R.modPow(TWO, N)");
@@ -201,15 +216,15 @@ public class TestModular extends junit.framework.TestCase {
 
     for(int i = 0; i < en.length; i++) {
       en[i] = new long[esize + 1][];
-      long n = (RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL) | 1L; 
+      long n = (RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL) | 1L;
       if (n == 1) n = 3;
       en[i][0] = new long[]{n};
-      final BigInteger N = BigInteger.valueOf(n); 
+      final BigInteger N = BigInteger.valueOf(n);
 
       for(int j = 1; j < en[i].length; j++) {
         long e = RANDOM.nextLong();
         if (e < 0) e = -e;
-        final BigInteger E = BigInteger.valueOf(e); 
+        final BigInteger E = BigInteger.valueOf(e);
         en[i][j] = new long[]{e, TWO.modPow(E, N).longValue()};
       }
     }
@@ -253,10 +268,10 @@ public class TestModular extends junit.framework.TestCase {
   static class Montgomery2 extends Montgomery {
     /** Compute 2^y mod N for N odd. */
     long mod2(final long y) {
-      long r0 = R - N; 
+      long r0 = R - N;
       long r1 = r0 << 1;
       if (r1 >= N) r1 -= N;
-      
+
       for(long mask = Long.highestOneBit(y); mask > 0; mask >>>= 1) {
         if ((mask & y) == 0) {
           r1 = product.m(r0, r1);
@@ -269,7 +284,7 @@ public class TestModular extends junit.framework.TestCase {
       return product.m(r0, 1);
     }
   }
-  
+
   static void modBenchmarks() {
     final Timer t = new Timer(false);
     t.tick("modBenchmarks()");
@@ -283,12 +298,15 @@ public class TestModular extends junit.framework.TestCase {
         final long e = en[i][j][0];
         final long answer = en[i][j][1];
         final long s = Modular.mod(e, n);
-        if (s != answer)
-          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("Modular.mod");
-    
+
     final Montgomery2 m2 = new Montgomery2();
     for(int i = 0; i < en.length; i++) {
       final long n = en[i][0][0];
@@ -297,8 +315,11 @@ public class TestModular extends junit.framework.TestCase {
         final long e = en[i][j][0];
         final long answer = en[i][j][1];
         final long s = m2.mod(e);
-        if (s != answer)
-          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("montgomery.mod");
@@ -310,21 +331,27 @@ public class TestModular extends junit.framework.TestCase {
         final long e = en[i][j][0];
         final long answer = en[i][j][1];
         final long s = m2.mod2(e);
-        if (s != answer)
-          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("montgomery.mod2");
 
     for(int i = 0; i < en.length; i++) {
       final long n = en[i][0][0];
-      final BigInteger N = BigInteger.valueOf(n); 
+      final BigInteger N = BigInteger.valueOf(n);
       for(int j = 1; j < en[i].length; j++) {
         final long e = en[i][j][0];
         final long answer = en[i][j][1];
         final long s = TWO.modPow(BigInteger.valueOf(e), N).longValue();
-        if (s != answer)
-          assertEquals("e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s, answer, s);
+        if (s != answer) {
+          Assert.assertEquals(
+              "e=" + e + ", n=" + n + ", answer=" + answer + " but s=" + s,
+              answer, s);
+        }
       }
     }
     t.tick("BigInteger.modPow(e, n)");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestSummation.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestSummation.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestSummation.java
index e80b9bb..2741962 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestSummation.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/pi/math/TestSummation.java
@@ -28,14 +28,19 @@ import org.apache.hadoop.examples.pi.Container;
 import org.apache.hadoop.examples.pi.Util;
 import org.apache.hadoop.examples.pi.Util.Timer;
 import org.apache.hadoop.examples.pi.math.TestModular.Montgomery2;
+import org.junit.Test;
+import org.junit.Assert;
 
-public class TestSummation extends junit.framework.TestCase {
+public class TestSummation {
   static final Random RANDOM = new Random();
   static final BigInteger TWO = BigInteger.valueOf(2);
+  private static final double DOUBLE_DELTA = 0.000000001f;
 
   private static Summation2 newSummation(final long base, final long range, final long delta) {
-    final ArithmeticProgression N = new ArithmeticProgression('n', base+3, delta, base+3+range);
-    final ArithmeticProgression E = new ArithmeticProgression('e', base+range, -delta, base);
+    final ArithmeticProgression N = new ArithmeticProgression('n', base + 3,
+        delta, base + 3 + range);
+    final ArithmeticProgression E = new ArithmeticProgression('e', base + range,
+        -delta, base);
     return new Summation2(N, E);
   }
 
@@ -53,10 +58,11 @@ public class TestSummation extends junit.framework.TestCase {
 
     final List<Summation> combined = Util.combine(a);
 //    Util.out.println("combined=" + combined);
-    assertEquals(1, combined.size());
-    assertEquals(sigma, combined.get(0));
+    Assert.assertEquals(1, combined.size());
+    Assert.assertEquals(sigma, combined.get(0));
   }
 
+  @Test
   public void testSubtract() {
     final Summation sigma = newSummation(3, 10000, 20);
     final int size = 10;
@@ -112,7 +118,9 @@ public class TestSummation extends junit.framework.TestCase {
       long n = N.value;
       double s = 0;
       for(; e > E.limit; e += E.delta) {
-        s = Modular.addMod(s, TWO.modPow(BigInteger.valueOf(e), BigInteger.valueOf(n)).doubleValue()/n);
+        s = Modular.addMod(s,
+            TWO.modPow(BigInteger.valueOf(e), BigInteger.valueOf(n))
+                .doubleValue() / n);
         n += N.delta;
       }
       return s;
@@ -124,16 +132,16 @@ public class TestSummation extends junit.framework.TestCase {
     t.tick("sigma=" + sigma);
     final double value = sigma.compute();
     t.tick("compute=" + value);
-    assertEquals(value, sigma.compute_modular());
+    Assert.assertEquals(value, sigma.compute_modular(), DOUBLE_DELTA);
     t.tick("compute_modular");
-    assertEquals(value, sigma.compute_montgomery());
+    Assert.assertEquals(value, sigma.compute_montgomery(), DOUBLE_DELTA);
     t.tick("compute_montgomery");
-    assertEquals(value, sigma.compute_montgomery2());
+    Assert.assertEquals(value, sigma.compute_montgomery2(), DOUBLE_DELTA);
     t.tick("compute_montgomery2");
 
-    assertEquals(value, sigma.compute_modBigInteger());
+    Assert.assertEquals(value, sigma.compute_modBigInteger(), DOUBLE_DELTA);
     t.tick("compute_modBigInteger");
-    assertEquals(value, sigma.compute_modPow());
+    Assert.assertEquals(value, sigma.compute_modPow(), DOUBLE_DELTA);
     t.tick("compute_modPow");
   }
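
The new DOUBLE_DELTA constant reflects a real behavioural difference between the two APIs: org.junit.Assert declares a deprecated assertEquals(double, double) overload that unconditionally fails and directs callers to the three-argument form, while the old JUnit 3 calls resolved to the Object overload via autoboxing. A minimal illustration with hypothetical values, not taken from the commit:

  import org.junit.Assert;
  import org.junit.Test;

  public class DoubleAssertionExample {

    private static final double DELTA = 1e-9;

    @Test
    public void testFloatingPointComparison() {
      double computed = 0.1 + 0.2;
      // Assert.assertEquals(0.3, computed);        // deprecated two-argument overload: always fails in JUnit 4
      Assert.assertEquals(0.3, computed, DELTA);    // compares within an explicit tolerance
    }
  }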
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/TestDataJoin.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/TestDataJoin.java b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/TestDataJoin.java
index dbb8ef0..2daae2e 100644
--- a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/TestDataJoin.java
+++ b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/TestDataJoin.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.contrib.utils.join;
 
 import java.io.IOException;
 
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-import junit.extensions.TestSetup;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -36,24 +36,27 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.*;
 
-public class TestDataJoin extends TestCase {
-
+/**
+ * Class to test a JOIN between two
+ * data sources.
+ */
+public class TestDataJoin {
   private static MiniDFSCluster cluster = null;
-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestDataJoin.class)) {
-      protected void setUp() throws Exception {
-        Configuration conf = new Configuration();
-        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-      }
-      protected void tearDown() throws Exception {
-        if (cluster != null) {
-          cluster.shutdown();
-        }
-      }
-    };
-    return setup;
+
+  @Before
+  public void setUp() throws Exception {
+    Configuration conf = new Configuration();
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
   }
 
+  @Test
   public void testDataJoin() throws Exception {
     final int srcs = 4;
     JobConf job = new JobConf();
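
The JUnit 3 version wrapped the suite in a TestSetup, so the MiniDFSCluster was started once for the whole class; the converted test uses @Before/@After, which now brings the cluster up and shuts it down around each test method. The closer JUnit 4 analogue of the old suite-level setup would be @BeforeClass/@AfterClass on static methods, sketched below as an alternative shape (not what this patch does):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hdfs.MiniDFSCluster;
  import org.junit.AfterClass;
  import org.junit.Assert;
  import org.junit.BeforeClass;
  import org.junit.Test;

  public class SuiteLevelClusterExample {

    private static MiniDFSCluster cluster;

    @BeforeClass                               // runs once before all tests, like TestSetup.setUp()
    public static void startCluster() throws Exception {
      Configuration conf = new Configuration();
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    }

    @AfterClass                                // runs once after all tests, like TestSetup.tearDown()
    public static void stopCluster() {
      if (cluster != null) {
        cluster.shutdown();
      }
    }

    @Test
    public void testClusterStarted() {
      Assert.assertNotNull(cluster);
    }
  }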

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java b/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
index 3704c5b..3e52b3c 100644
--- a/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
+++ b/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
@@ -38,11 +38,13 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.MiniMRClientClusterFactory;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
+import org.junit.Assert;
+import org.junit.Test;
 import org.slf4j.event.Level;
 
 import static org.slf4j.LoggerFactory.getLogger;
 
-public class TestDistCh extends junit.framework.TestCase {
+public class TestDistCh {
   {
     GenericTestUtils.setLogLevel(
         getLogger("org.apache.hadoop.hdfs.StateChange"), Level.ERROR);
@@ -75,20 +77,20 @@ public class TestDistCh extends junit.framework.TestCase {
 
     Path createSmallFile(Path dir) throws IOException {
       final Path f = new Path(dir, "f" + ++fcount);
-      assertTrue(!fs.exists(f));
+      Assert.assertTrue(!fs.exists(f));
       final DataOutputStream out = fs.create(f);
       try {
         out.writeBytes("createSmallFile: f=" + f);
       } finally {
         out.close();
       }
-      assertTrue(fs.exists(f));
+      Assert.assertTrue(fs.exists(f));
       return f;
     }
 
     Path mkdir(Path dir) throws IOException {
-      assertTrue(fs.mkdirs(dir));
-      assertTrue(fs.getFileStatus(dir).isDirectory());
+      Assert.assertTrue(fs.mkdirs(dir));
+      Assert.assertTrue(fs.getFileStatus(dir).isDirectory());
       return dir;
     }
     
@@ -127,7 +129,8 @@ public class TestDistCh extends junit.framework.TestCase {
       defaultPerm = permission == null || "".equals(permission);
     }
   }
-  
+
+  @Test
   public void testDistCh() throws Exception {
     final Configuration conf = new Configuration();
 
@@ -190,13 +193,13 @@ public class TestDistCh extends junit.framework.TestCase {
   }
 
   static void checkFileStatus(ChPermissionStatus expected, FileStatus actual) {
-    assertEquals(expected.getUserName(), actual.getOwner());
-    assertEquals(expected.getGroupName(), actual.getGroup());
+    Assert.assertEquals(expected.getUserName(), actual.getOwner());
+    Assert.assertEquals(expected.getGroupName(), actual.getGroup());
     FsPermission perm = expected.getPermission();
     if (actual.isFile() && expected.defaultPerm) {
       perm = perm.applyUMask(UMASK);
     }
-    assertEquals(perm, actual.getPermission());
+    Assert.assertEquals(perm, actual.getPermission());
   }
 
   private static String runLsr(final FsShell shell, String root, int returnvalue
@@ -210,7 +213,7 @@ public class TestDistCh extends junit.framework.TestCase {
     System.setErr(out);
     final String results;
     try {
-      assertEquals(returnvalue, shell.run(new String[]{"-lsr", root}));
+      Assert.assertEquals(returnvalue, shell.run(new String[]{"-lsr", root}));
       results = bytes.toString();
     } finally {
       IOUtils.closeStream(out);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java
index 3a81e42..1ca5436 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestTypedBytesWritable.java
@@ -26,10 +26,12 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestTypedBytesWritable extends TestCase {
+public class TestTypedBytesWritable {
 
+  @Test
   public void testToString() {
     TypedBytesWritable tbw = new TypedBytesWritable();
     tbw.setValue(true);
@@ -46,6 +48,7 @@ public class TestTypedBytesWritable extends TestCase {
     assertEquals("random text", tbw.toString());
   }
 
+  @Test
   public void testIO() throws IOException {
     TypedBytesWritable tbw = new TypedBytesWritable();
     tbw.setValue(12345);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilterInitializer.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilterInitializer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilterInitializer.java
index 63c2cf3..07478ca 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilterInitializer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilterInitializer.java
@@ -22,20 +22,23 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-import org.junit.Test;
 
-public class TestAmFilterInitializer extends TestCase {
+/**
+ * Test class for {@link AmFilterInitializer}.
+ */
+public class TestAmFilterInitializer {
 
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     NetUtils.addStaticResolution("host1", "172.0.0.1");
     NetUtils.addStaticResolution("host2", "172.0.0.1");
     NetUtils.addStaticResolution("host3", "172.0.0.1");




[2/2] hadoop git commit: HADOOP-14729. Upgrade JUnit 3 test cases to JUnit 4. Contributed by Ajay Kumar.

Posted by ar...@apache.org.
HADOOP-14729. Upgrade JUnit 3 test cases to JUnit 4. Contributed by Ajay Kumar.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8b7cbe38
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8b7cbe38
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8b7cbe38

Branch: refs/heads/trunk
Commit: 8b7cbe3840f1a4f4dc038ac3018a4f0dbe3bc82d
Parents: 3a4e861
Author: Arpit Agarwal <ar...@apache.org>
Authored: Fri Aug 25 09:55:46 2017 -0700
Committer: Arpit Agarwal <ar...@apache.org>
Committed: Fri Aug 25 09:55:46 2017 -0700

----------------------------------------------------------------------
 .../apache/hadoop/conf/TestConfiguration.java   | 340 ++++++++++++-------
 .../hadoop/conf/TestConfigurationSubclass.java  |   8 +-
 .../apache/hadoop/conf/TestDeprecatedKeys.java  |   8 +-
 .../apache/hadoop/conf/TestGetInstances.java    |   8 +-
 .../org/apache/hadoop/fs/TestAvroFSInput.java   |   6 +-
 .../test/java/org/apache/hadoop/fs/TestDU.java  |  19 +-
 .../java/org/apache/hadoop/fs/TestFilterFs.java |   8 +-
 .../hadoop/fs/TestGetFileBlockLocations.java    |  25 +-
 .../org/apache/hadoop/fs/TestGlobExpander.java  |   7 +-
 .../java/org/apache/hadoop/fs/TestTrash.java    |  24 +-
 .../apache/hadoop/fs/TestTruncatedInputBug.java |   6 +-
 .../hadoop/fs/permission/TestFsPermission.java  |  14 +-
 .../apache/hadoop/ipc/TestFairCallQueue.java    |  33 +-
 .../org/apache/hadoop/log/TestLog4Json.java     |   6 +-
 .../hadoop/net/TestScriptBasedMapping.java      |   9 +-
 .../TestScriptBasedMappingWithDependency.java   |   7 +-
 .../security/TestAuthenticationFilter.java      |   6 +-
 .../TestAuthenticationWithProxyUserFilter.java  |   7 +-
 .../security/TestWhitelistBasedResolver.java    |   9 +-
 .../apache/hadoop/security/token/TestToken.java |  12 +-
 .../hadoop/util/TestAsyncDiskService.java       |   6 +-
 .../apache/hadoop/util/TestCacheableIPList.java |  13 +-
 .../apache/hadoop/util/TestFileBasedIPList.java |  12 +-
 .../org/apache/hadoop/util/TestFindClass.java   |   2 +-
 .../apache/hadoop/util/TestGenericsUtil.java    |  12 +-
 .../org/apache/hadoop/util/TestIndexedSort.java |   7 +-
 .../hadoop/util/TestNativeLibraryChecker.java   |   8 +-
 .../mapred/TestMRWithDistributedCache.java      |  40 +--
 .../hadoop/mapred/TestFileOutputCommitter.java  |  23 +-
 .../apache/hadoop/mapred/TestIndexCache.java    |  15 +-
 .../hadoop/mapred/TestJobEndNotifier.java       |  12 +-
 .../mapreduce/TestJobMonitorAndPrint.java       |   8 +-
 .../lib/output/TestFileOutputCommitter.java     |  39 ++-
 .../lib/output/TestFileOutputFormat.java        |   7 +-
 .../mapred/jobcontrol/TestJobControl.java       |   6 +-
 .../hadoop/mapreduce/TestMapCollection.java     |   6 -
 .../lib/input/TestDelegatingInputFormat.java    |   7 +-
 .../lib/jobcontrol/TestMapReduceJobControl.java |   2 +-
 .../output/TestMRCJCFileOutputCommitter.java    |  17 +-
 .../mapred/nativetask/TestTaskContext.java      |  23 +-
 .../nativetask/buffer/TestInputBuffer.java      |   7 +-
 .../nativetask/buffer/TestOutputBuffer.java     |   7 +-
 .../nativetask/serde/TestKVSerializer.java      |  13 +-
 .../nativetask/utils/TestReadWriteBuffer.java   |   7 +-
 .../nativetask/utils/TestSizedWritable.java     |   5 +-
 .../examples/TestBaileyBorweinPlouffe.java      |  14 +-
 .../hadoop/examples/pi/math/TestLongLong.java   |  33 +-
 .../hadoop/examples/pi/math/TestModular.java    | 105 +++---
 .../hadoop/examples/pi/math/TestSummation.java  |  30 +-
 .../hadoop/contrib/utils/join/TestDataJoin.java |  41 +--
 .../org/apache/hadoop/tools/TestDistCh.java     |  23 +-
 .../typedbytes/TestTypedBytesWritable.java      |   7 +-
 .../amfilter/TestAmFilterInitializer.java       |  15 +-
 53 files changed, 718 insertions(+), 416 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
index 8fe88bc..b41a807 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
@@ -44,7 +44,11 @@ import java.util.regex.Pattern;
 import static java.util.concurrent.TimeUnit.*;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import static org.junit.Assert.assertArrayEquals;
 
 import org.apache.commons.lang.StringUtils;
@@ -65,11 +69,11 @@ import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.Logger;
 import org.apache.log4j.spi.LoggingEvent;
 import org.hamcrest.CoreMatchers;
-import org.junit.Assert;
 import org.mockito.Mockito;
 
-public class TestConfiguration extends TestCase {
+public class TestConfiguration {
 
+  private static final double DOUBLE_DELTA = 0.000000001f;
   private Configuration conf;
   final static String CONFIG = new File("./test-config-TestConfiguration.xml").getAbsolutePath();
   final static String CONFIG2 = new File("./test-config2-TestConfiguration.xml").getAbsolutePath();
@@ -82,7 +86,7 @@ public class TestConfiguration extends TestCase {
   private static final String CONFIG_MULTI_BYTE_SAVED = new File(
     "./test-config-multi-byte-saved-TestConfiguration.xml").getAbsolutePath();
   final static Random RAN = new Random();
-  final static String XMLHEADER = 
+  final static String XMLHEADER =
             IBM_JAVA?"<?xml version=\"1.0\" encoding=\"UTF-8\"?><configuration>":
   "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><configuration>";
 
@@ -94,18 +98,16 @@ public class TestConfiguration extends TestCase {
 
   private BufferedWriter out;
 
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setUp() throws Exception {
     conf = new Configuration();
   }
-  
-  @Override
-  protected void tearDown() throws Exception {
+
+  @After
+  public void tearDown() throws Exception {
     if(out != null) {
       out.close();
     }
-    super.tearDown();
     new File(CONFIG).delete();
     new File(CONFIG2).delete();
     new File(CONFIG_FOR_ENUM).delete();
@@ -113,7 +115,7 @@ public class TestConfiguration extends TestCase {
     new File(CONFIG_MULTI_BYTE).delete();
     new File(CONFIG_MULTI_BYTE_SAVED).delete();
   }
-  
+
   private void startConfig() throws IOException{
     out.write("<?xml version=\"1.0\"?>\n");
     out.write("<configuration>\n");
@@ -164,6 +166,7 @@ public class TestConfiguration extends TestCase {
         + " [\n<!ENTITY " + entity + " SYSTEM \"" + value + "\">\n]>");
   }
 
+  @Test
   public void testInputStreamResource() throws Exception {
     StringWriter writer = new StringWriter();
     out = new BufferedWriter(writer);
@@ -182,6 +185,7 @@ public class TestConfiguration extends TestCase {
     assertEquals("A", conf.get("prop"));
   }
 
+  @Test
   public void testFinalWarnings() throws Exception {
     // Make a configuration file with a final property
     StringWriter writer = new StringWriter();
@@ -226,6 +230,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testNoFinalWarnings() throws Exception {
     // Make a configuration file with a final property
     StringWriter writer = new StringWriter();
@@ -263,6 +268,7 @@ public class TestConfiguration extends TestCase {
 
 
 
+  @Test
   public void testFinalWarningsMultiple() throws Exception {
     // Make a configuration file with a repeated final property
     StringWriter writer = new StringWriter();
@@ -296,6 +302,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testFinalWarningsMultipleOverride() throws Exception {
     // Make a configuration file with 2 final properties with different values
     StringWriter writer = new StringWriter();
@@ -358,6 +365,7 @@ public class TestConfiguration extends TestCase {
    * round-trips multi-byte string literals through saving and loading of config
    * and asserts that the same values were read.
    */
+  @Test
   public void testMultiByteCharacters() throws IOException {
     String priorDefaultEncoding = System.getProperty("file.encoding");
     try {
@@ -388,6 +396,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testVariableSubstitution() throws IOException {
     // stubbing only environment dependent functions
     Configuration mock = Mockito.spy(conf);
@@ -419,12 +428,13 @@ public class TestConfiguration extends TestCase {
       assertEq(p.val, gotRawVal);
       assertEq(p.expectEval, gotVal);
     }
-      
+
     // check that expansion also occurs for getInt()
     assertTrue(mock.getInt("intvar", -1) == 42);
     assertTrue(mock.getInt("my.int", -1) == 42);
   }
 
+  @Test
   public void testEnvDefault() throws IOException {
     Configuration mock = Mockito.spy(conf);
     Mockito.when(mock.getenv("NULL_VALUE")).thenReturn(null);
@@ -465,6 +475,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testFinalParam() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -474,7 +485,7 @@ public class TestConfiguration extends TestCase {
     Configuration conf1 = new Configuration();
     conf1.addResource(fileResource);
     assertNull("my var is not null", conf1.get("my.var"));
-	
+
     out=new BufferedWriter(new FileWriter(CONFIG2));
     startConfig();
     declareProperty("my.var", "myval", "myval", false);
@@ -486,6 +497,7 @@ public class TestConfiguration extends TestCase {
     assertNull("my var is not final", conf2.get("my.var"));
   }
 
+  @Test
   public void testCompactFormat() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -530,8 +542,8 @@ public class TestConfiguration extends TestCase {
   }
 
   void declareProperty(String name, String val, String expectEval,
-                       boolean isFinal)
-    throws IOException {
+      boolean isFinal)
+      throws IOException {
     appendProperty(name, val, isFinal);
     Prop p = new Prop();
     p.name = name;
@@ -543,10 +555,10 @@ public class TestConfiguration extends TestCase {
   void appendProperty(String name, String val) throws IOException {
     appendProperty(name, val, false);
   }
- 
-  void appendProperty(String name, String val, boolean isFinal, 
+
+  void appendProperty(String name, String val, boolean isFinal,
       String ... sources)
-    throws IOException {
+      throws IOException {
     out.write("<property>");
     out.write("<name>");
     out.write(name);
@@ -564,19 +576,19 @@ public class TestConfiguration extends TestCase {
     }
     out.write("</property>\n");
   }
-  
+
   void appendCompactFormatProperty(String name, String val) throws IOException {
     appendCompactFormatProperty(name, val, false);
   }
 
   void appendCompactFormatProperty(String name, String val, boolean isFinal)
-    throws IOException {
+      throws IOException {
     appendCompactFormatProperty(name, val, isFinal, null);
   }
 
   void appendCompactFormatProperty(String name, String val, boolean isFinal,
       String source)
-    throws IOException {
+      throws IOException {
     out.write("<property ");
     out.write("name=\"");
     out.write(name);
@@ -595,6 +607,7 @@ public class TestConfiguration extends TestCase {
     out.write("/>\n");
   }
 
+  @Test
   public void testOverlay() throws IOException{
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -610,25 +623,26 @@ public class TestConfiguration extends TestCase {
     appendProperty("b","d");
     appendProperty("e","e");
     endConfig();
-    
+
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
-    
+
     //set dynamically something
     conf.set("c","d");
     conf.set("a","d");
-    
+
     Configuration clone=new Configuration(conf);
     clone.addResource(new Path(CONFIG2));
-    
-    assertEquals(clone.get("a"), "d"); 
-    assertEquals(clone.get("b"), "d"); 
-    assertEquals(clone.get("c"), "d"); 
-    assertEquals(clone.get("d"), "e"); 
-    assertEquals(clone.get("e"), "f"); 
-    
-  }
-  
+
+    assertEquals(clone.get("a"), "d");
+    assertEquals(clone.get("b"), "d");
+    assertEquals(clone.get("c"), "d");
+    assertEquals(clone.get("d"), "e");
+    assertEquals(clone.get("e"), "f");
+
+  }
+
+  @Test
   public void testCommentsInValue() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -640,6 +654,7 @@ public class TestConfiguration extends TestCase {
     assertEquals("this  contains a comment", conf.get("my.comment"));
   }
 
+  @Test
   public void testEscapedCharactersInValue() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -651,6 +666,7 @@ public class TestConfiguration extends TestCase {
     assertEquals("''''", conf.get("my.comment"));
   }
 
+  @Test
   public void testTrim() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -658,13 +674,13 @@ public class TestConfiguration extends TestCase {
     String[] name = new String[100];
     for(int i = 0; i < name.length; i++) {
       name[i] = "foo" + i;
-      StringBuilder prefix = new StringBuilder(); 
-      StringBuilder postfix = new StringBuilder(); 
+      StringBuilder prefix = new StringBuilder();
+      StringBuilder postfix = new StringBuilder();
       for(int j = 0; j < 3; j++) {
         prefix.append(whitespaces[RAN.nextInt(whitespaces.length)]);
         postfix.append(whitespaces[RAN.nextInt(whitespaces.length)]);
       }
-      
+
       appendProperty(prefix + name[i] + postfix, name[i] + ".value");
     }
     endConfig();
@@ -675,6 +691,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testGetLocalPath() throws IOException {
     Configuration conf = new Configuration();
     String[] dirs = new String[]{"a", "b", "c"};
@@ -690,7 +707,8 @@ public class TestConfiguration extends TestCase {
         localPath.contains(" "));
     }
   }
-  
+
+  @Test
   public void testGetFile() throws IOException {
     Configuration conf = new Configuration();
     String[] dirs = new String[]{"a", "b", "c"};
@@ -707,29 +725,32 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testToString() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
-    
-    String expectedOutput = 
-      "Configuration: core-default.xml, core-site.xml, " + 
+
+    String expectedOutput =
+      "Configuration: core-default.xml, core-site.xml, " +
       fileResource.toString();
     assertEquals(expectedOutput, conf.toString());
   }
-  
+
+  @Test
   public void testWriteXml() throws IOException {
     Configuration conf = new Configuration();
-    ByteArrayOutputStream baos = new ByteArrayOutputStream(); 
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
     conf.writeXml(baos);
     String result = baos.toString();
     assertTrue("Result has proper header", result.startsWith(XMLHEADER));
-	  
+
     assertTrue("Result has proper footer", result.endsWith("</configuration>"));
   }
-  
+
+  @Test
   public void testIncludes() throws Exception {
     tearDown();
     System.out.println("XXX testIncludes");
@@ -758,8 +779,8 @@ public class TestConfiguration extends TestCase {
     // verify that the includes file contains all properties
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
-    assertEquals(conf.get("a"), "b"); 
-    assertEquals(conf.get("c"), "d"); 
+    assertEquals(conf.get("a"), "b");
+    assertEquals(conf.get("c"), "d");
     assertEquals(conf.get("e"), "f");
     assertEquals(conf.get("g"), "h");
     assertEquals(conf.get("i"), "j");
@@ -767,6 +788,7 @@ public class TestConfiguration extends TestCase {
     tearDown();
   }
 
+  @Test
   public void testCharsetInDocumentEncoding() throws Exception {
     tearDown();
     out=new BufferedWriter(new OutputStreamWriter(new FileOutputStream(CONFIG),
@@ -785,6 +807,7 @@ public class TestConfiguration extends TestCase {
     tearDown();
   }
 
+  @Test
   public void testEntityReference() throws Exception {
     tearDown();
     out=new BufferedWriter(new FileWriter(CONFIG));
@@ -803,6 +826,7 @@ public class TestConfiguration extends TestCase {
     tearDown();
   }
 
+  @Test
   public void testSystemEntityReference() throws Exception {
     tearDown();
     out=new BufferedWriter(new FileWriter(CONFIG2));
@@ -824,6 +848,7 @@ public class TestConfiguration extends TestCase {
     tearDown();
   }
 
+  @Test
   public void testIncludesWithFallback() throws Exception {
     tearDown();
     out=new BufferedWriter(new FileWriter(CONFIG2));
@@ -862,6 +887,7 @@ public class TestConfiguration extends TestCase {
     tearDown();
   }
 
+  @Test
   public void testRelativeIncludes() throws Exception {
     tearDown();
     String relConfig = new File("./tmp/test-config.xml").getAbsolutePath();
@@ -893,6 +919,7 @@ public class TestConfiguration extends TestCase {
     new File(new File(relConfig).getParent()).delete();
   }
 
+  @Test
   public void testIntegerRanges() {
     Configuration conf = new Configuration();
     conf.set("first", "-100");
@@ -923,7 +950,8 @@ public class TestConfiguration extends TestCase {
     assertEquals(true, range.isIncluded(34));
     assertEquals(true, range.isIncluded(100000000));
   }
-  
+
+  @Test
   public void testGetRangeIterator() throws Exception {
     Configuration config = new Configuration(false);
     IntegerRanges ranges = config.getRange("Test", "");
@@ -943,7 +971,7 @@ public class TestConfiguration extends TestCase {
       found.add(i);
     }
     assertEquals(expected, found);
-    
+
     ranges = config.getRange("Test", "8-12, 5- 7");
     expected = new HashSet<Integer>(Arrays.asList(5,6,7,8,9,10,11,12));
     found = new HashSet<Integer>();
@@ -953,6 +981,7 @@ public class TestConfiguration extends TestCase {
     assertEquals(expected, found);
   }
 
+  @Test
   public void testHexValues() throws IOException{
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -984,6 +1013,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testIntegerValues() throws IOException{
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1014,7 +1044,8 @@ public class TestConfiguration extends TestCase {
       // pass
     }
   }
-  
+
+  @Test
   public void testHumanReadableValues() throws IOException {
     out = new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1035,6 +1066,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testBooleanValues() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1058,7 +1090,8 @@ public class TestConfiguration extends TestCase {
     assertEquals(false, conf.getBoolean("test.bool7", true));
     assertEquals(false, conf.getBoolean("test.bool8", false));
   }
-  
+
+  @Test
   public void testFloatValues() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1070,10 +1103,10 @@ public class TestConfiguration extends TestCase {
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
-    assertEquals(3.1415f, conf.getFloat("test.float1", 0.0f));
-    assertEquals(3.1415f, conf.getFloat("test.float2", 0.0f));
-    assertEquals(-3.1415f, conf.getFloat("test.float3", 0.0f));
-    assertEquals(-3.1415f, conf.getFloat("test.float4", 0.0f));
+    assertEquals(3.1415f, conf.getFloat("test.float1", 0.0f), DOUBLE_DELTA);
+    assertEquals(3.1415f, conf.getFloat("test.float2", 0.0f), DOUBLE_DELTA);
+    assertEquals(-3.1415f, conf.getFloat("test.float3", 0.0f), DOUBLE_DELTA);
+    assertEquals(-3.1415f, conf.getFloat("test.float4", 0.0f), DOUBLE_DELTA);
     try {
       conf.getFloat("test.float5", 0.0f);
       fail("Property had invalid float value, but was read successfully.");
@@ -1081,7 +1114,8 @@ public class TestConfiguration extends TestCase {
       // pass
     }
   }
-  
+
+  @Test
   public void testDoubleValues() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1093,10 +1127,10 @@ public class TestConfiguration extends TestCase {
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
-    assertEquals(3.1415, conf.getDouble("test.double1", 0.0));
-    assertEquals(3.1415, conf.getDouble("test.double2", 0.0));
-    assertEquals(-3.1415, conf.getDouble("test.double3", 0.0));
-    assertEquals(-3.1415, conf.getDouble("test.double4", 0.0));
+    assertEquals(3.1415, conf.getDouble("test.double1", 0.0), DOUBLE_DELTA);
+    assertEquals(3.1415, conf.getDouble("test.double2", 0.0), DOUBLE_DELTA);
+    assertEquals(-3.1415, conf.getDouble("test.double3", 0.0), DOUBLE_DELTA);
+    assertEquals(-3.1415, conf.getDouble("test.double4", 0.0), DOUBLE_DELTA);
     try {
       conf.getDouble("test.double5", 0.0);
       fail("Property had invalid double value, but was read successfully.");
@@ -1104,7 +1138,8 @@ public class TestConfiguration extends TestCase {
       // pass
     }
   }
-  
+
+  @Test
   public void testGetClass() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1113,10 +1148,13 @@ public class TestConfiguration extends TestCase {
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
-    assertEquals("java.lang.Integer", conf.getClass("test.class1", null).getCanonicalName());
-    assertEquals("java.lang.Integer", conf.getClass("test.class2", null).getCanonicalName());
+    assertEquals("java.lang.Integer",
+        conf.getClass("test.class1", null).getCanonicalName());
+    assertEquals("java.lang.Integer",
+        conf.getClass("test.class2", null).getCanonicalName());
   }
-  
+
+  @Test
   public void testGetClasses() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1132,14 +1170,15 @@ public class TestConfiguration extends TestCase {
     assertArrayEquals(expectedNames, extractClassNames(classes1));
     assertArrayEquals(expectedNames, extractClassNames(classes2));
   }
-  
+
+  @Test
   public void testGetStringCollection() {
     Configuration c = new Configuration();
     c.set("x", " a, b\n,\nc ");
     Collection<String> strs = c.getTrimmedStringCollection("x");
     assertEquals(3, strs.size());
     assertArrayEquals(new String[]{ "a", "b", "c" },
-                      strs.toArray(new String[0]));
+        strs.toArray(new String[0]));
 
     // Check that the result is mutable
     strs.add("z");
@@ -1150,13 +1189,14 @@ public class TestConfiguration extends TestCase {
     strs.add("z");
   }
 
+  @Test
   public void testGetTrimmedStringCollection() {
     Configuration c = new Configuration();
     c.set("x", "a, b, c");
     Collection<String> strs = c.getStringCollection("x");
     assertEquals(3, strs.size());
     assertArrayEquals(new String[]{ "a", " b", " c" },
-                      strs.toArray(new String[0]));
+        strs.toArray(new String[0]));
 
     // Check that the result is mutable
     strs.add("z");
@@ -1174,9 +1214,10 @@ public class TestConfiguration extends TestCase {
     }
     return classNames;
   }
-  
+
   enum Dingo { FOO, BAR };
   enum Yak { RAB, FOO };
+  @Test
   public void testEnum() {
     Configuration conf = new Configuration();
     conf.setEnum("test.enum", Dingo.FOO);
@@ -1193,6 +1234,7 @@ public class TestConfiguration extends TestCase {
     assertTrue(fail);
   }
 
+  @Test
   public void testEnumFromXml() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG_FOR_ENUM));
     startConfig();
@@ -1213,6 +1255,7 @@ public class TestConfiguration extends TestCase {
     assertTrue(fail);
   }
 
+  @Test
   public void testTimeDuration() {
     Configuration conf = new Configuration(false);
     conf.setTimeDuration("test.time.a", 7L, SECONDS);
@@ -1246,17 +1289,18 @@ public class TestConfiguration extends TestCase {
     assertEquals(30L, conf.getTimeDuration("test.time.d", 40, SECONDS));
 
     for (Configuration.ParsedTimeDuration ptd :
-         Configuration.ParsedTimeDuration.values()) {
+        Configuration.ParsedTimeDuration.values()) {
       conf.setTimeDuration("test.time.unit", 1, ptd.unit());
       assertEquals(1 + ptd.suffix(), conf.get("test.time.unit"));
       assertEquals(1, conf.getTimeDuration("test.time.unit", 2, ptd.unit()));
     }
   }
 
+  @Test
   public void testTimeDurationWarning() {
     // check warn for possible loss of precision
     final String warnFormat = "Possible loss of precision converting %s" +
-            " to %s for test.time.warn";
+        " to %s for test.time.warn";
     final ArrayList<String> warnchk = new ArrayList<>();
     Configuration wconf = new Configuration(false) {
       @Override
@@ -1290,6 +1334,7 @@ public class TestConfiguration extends TestCase {
     assertEquals(2, warnchk.size());
   }
 
+  @Test
   public void testPattern() throws IOException {
     out = new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1303,20 +1348,21 @@ public class TestConfiguration extends TestCase {
     Pattern defaultPattern = Pattern.compile("x+");
     // Return default if missing
     assertEquals(defaultPattern.pattern(),
-                 conf.getPattern("xxxxx", defaultPattern).pattern());
+        conf.getPattern("xxxxx", defaultPattern).pattern());
     // Return null if empty and default is null
     assertNull(conf.getPattern("test.pattern1", null));
     // Return default for empty
     assertEquals(defaultPattern.pattern(),
-                 conf.getPattern("test.pattern1", defaultPattern).pattern());
+        conf.getPattern("test.pattern1", defaultPattern).pattern());
     // Return default for malformed
     assertEquals(defaultPattern.pattern(),
-                 conf.getPattern("test.pattern2", defaultPattern).pattern());
+        conf.getPattern("test.pattern2", defaultPattern).pattern());
     // Works for correct patterns
     assertEquals("a+b",
-                 conf.getPattern("test.pattern3", defaultPattern).pattern());
+        conf.getPattern("test.pattern3", defaultPattern).pattern());
   }
 
+  @Test
   public void testPropertySource() throws IOException {
     out = new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1329,17 +1375,19 @@ public class TestConfiguration extends TestCase {
     assertEquals(1, sources.length);
     assertEquals(
         "Resource string returned for a file-loaded property" +
-        " must be a proper absolute path",
+            " must be a proper absolute path",
         fileResource,
         new Path(sources[0]));
     assertArrayEquals("Resource string returned for a set() property must be " +
-        "\"programmatically\"",
+            "\"programmatically\"",
         new String[]{"programmatically"},
         conf.getPropertySources("fs.defaultFS"));
-    assertEquals("Resource string returned for an unset property must be null",
+    assertArrayEquals("Resource string returned for an unset property must "
+            + "be null",
         null, conf.getPropertySources("fs.defaultFoo"));
   }
-  
+
+  @Test
   public void testMultiplePropertySource() throws IOException {
     out = new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1354,24 +1402,25 @@ public class TestConfiguration extends TestCase {
     assertEquals("c", sources[2]);
     assertEquals(
         "Resource string returned for a file-loaded property" +
-        " must be a proper absolute path",
+            " must be a proper absolute path",
         fileResource,
         new Path(sources[3]));
   }
 
+  @Test
   public void testSocketAddress() {
     Configuration conf = new Configuration();
     final String defaultAddr = "host:1";
     final int defaultPort = 2;
     InetSocketAddress addr = null;
-    
+
     addr = conf.getSocketAddr("myAddress", defaultAddr, defaultPort);
     assertEquals(defaultAddr, NetUtils.getHostPortString(addr));
-    
+
     conf.set("myAddress", "host2");
     addr = conf.getSocketAddr("myAddress", defaultAddr, defaultPort);
     assertEquals("host2:"+defaultPort, NetUtils.getHostPortString(addr));
-    
+
     conf.set("myAddress", "host2:3");
     addr = conf.getSocketAddr("myAddress", defaultAddr, defaultPort);
     assertEquals("host2:3", NetUtils.getHostPortString(addr));
@@ -1387,35 +1436,38 @@ public class TestConfiguration extends TestCase {
     } catch (IllegalArgumentException iae) {
       threwException = true;
       assertEquals("Does not contain a valid host:port authority: " +
-                   "bad:-port (configuration property 'myAddress')",
-                   iae.getMessage());
-      
+              "bad:-port (configuration property 'myAddress')",
+          iae.getMessage());
+
     } finally {
       assertTrue(threwException);
     }
   }
 
+  @Test
   public void testSetSocketAddress() {
     Configuration conf = new Configuration();
     NetUtils.addStaticResolution("host", "127.0.0.1");
     final String defaultAddr = "host:1";
-    
-    InetSocketAddress addr = NetUtils.createSocketAddr(defaultAddr);    
+
+    InetSocketAddress addr = NetUtils.createSocketAddr(defaultAddr);
     conf.setSocketAddr("myAddress", addr);
     assertEquals(defaultAddr, NetUtils.getHostPortString(addr));
   }
-  
+
+  @Test
   public void testUpdateSocketAddress() throws IOException {
     InetSocketAddress addr = NetUtils.createSocketAddrForHost("host", 1);
     InetSocketAddress connectAddr = conf.updateConnectAddr("myAddress", addr);
     assertEquals(connectAddr.getHostName(), addr.getHostName());
-    
+
     addr = new InetSocketAddress(1);
     connectAddr = conf.updateConnectAddr("myAddress", addr);
     assertEquals(connectAddr.getHostName(),
-                 InetAddress.getLocalHost().getHostName());
+        InetAddress.getLocalHost().getHostName());
   }
 
+  @Test
   public void testReload() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1424,7 +1476,7 @@ public class TestConfiguration extends TestCase {
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
-    
+
     out=new BufferedWriter(new FileWriter(CONFIG2));
     startConfig();
     appendProperty("test.key1", "value1");
@@ -1432,23 +1484,23 @@ public class TestConfiguration extends TestCase {
     endConfig();
     Path fileResource1 = new Path(CONFIG2);
     conf.addResource(fileResource1);
-    
+
     // add a few values via set.
     conf.set("test.key3", "value4");
     conf.set("test.key4", "value5");
-    
+
     assertEquals("final-value1", conf.get("test.key1"));
     assertEquals("value2", conf.get("test.key2"));
     assertEquals("value4", conf.get("test.key3"));
     assertEquals("value5", conf.get("test.key4"));
-    
+
     // change values in the test file...
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
     appendProperty("test.key1", "final-value1");
     appendProperty("test.key3", "final-value3", true);
     endConfig();
-    
+
     conf.reloadConfiguration();
     assertEquals("value1", conf.get("test.key1"));
     // overlayed property overrides.
@@ -1457,6 +1509,7 @@ public class TestConfiguration extends TestCase {
     assertEquals("value5", conf.get("test.key4"));
   }
 
+  @Test
   public void testSize() {
     Configuration conf = new Configuration(false);
     conf.set("a", "A");
@@ -1464,6 +1517,7 @@ public class TestConfiguration extends TestCase {
     assertEquals(2, conf.size());
   }
 
+  @Test
   public void testClear() {
     Configuration conf = new Configuration(false);
     conf.set("a", "A");
@@ -1476,6 +1530,7 @@ public class TestConfiguration extends TestCase {
   public static class Fake_ClassLoader extends ClassLoader {
   }
 
+  @Test
   public void testClassLoader() {
     Configuration conf = new Configuration(false);
     conf.setQuietMode(false);
@@ -1483,7 +1538,7 @@ public class TestConfiguration extends TestCase {
     Configuration other = new Configuration(conf);
     assertTrue(other.getClassLoader() instanceof Fake_ClassLoader);
   }
-  
+
   static class JsonConfiguration {
     JsonProperty[] properties;
 
@@ -1546,6 +1601,7 @@ public class TestConfiguration extends TestCase {
     return ac;
   }
 
+  @Test
   public void testGetSetTrimmedNames() throws IOException {
     Configuration conf = new Configuration(false);
     conf.set(" name", "value");
@@ -1554,6 +1610,7 @@ public class TestConfiguration extends TestCase {
     assertEquals("value", conf.getRaw("  name  "));
   }
 
+  @Test
   public void testDumpProperty() throws IOException {
     StringWriter outWriter = new StringWriter();
     ObjectMapper mapper = new ObjectMapper();
@@ -1668,15 +1725,16 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testDumpConfiguration() throws IOException {
     StringWriter outWriter = new StringWriter();
     Configuration.dumpConfiguration(conf, outWriter);
     String jsonStr = outWriter.toString();
     ObjectMapper mapper = new ObjectMapper();
-    JsonConfiguration jconf = 
-      mapper.readValue(jsonStr, JsonConfiguration.class);
+    JsonConfiguration jconf =
+        mapper.readValue(jsonStr, JsonConfiguration.class);
     int defaultLength = jconf.getProperties().length;
-    
+
     // add 3 keys to the existing configuration properties
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1687,7 +1745,7 @@ public class TestConfiguration extends TestCase {
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
     out.close();
-    
+
     outWriter = new StringWriter();
     Configuration.dumpConfiguration(conf, outWriter);
     jsonStr = outWriter.toString();
@@ -1696,7 +1754,7 @@ public class TestConfiguration extends TestCase {
     int length = jconf.getProperties().length;
     // check for consistency in the number of properties parsed in Json format.
     assertEquals(length, defaultLength+3);
-    
+
     //change few keys in another resource file
     out=new BufferedWriter(new FileWriter(CONFIG2));
     startConfig();
@@ -1706,14 +1764,14 @@ public class TestConfiguration extends TestCase {
     Path fileResource1 = new Path(CONFIG2);
     conf.addResource(fileResource1);
     out.close();
-    
+
     outWriter = new StringWriter();
     Configuration.dumpConfiguration(conf, outWriter);
     jsonStr = outWriter.toString();
     mapper = new ObjectMapper();
     jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
-    
-    // put the keys and their corresponding attributes into a hashmap for their 
+
+    // put the keys and their corresponding attributes into a hashmap for their
     // efficient retrieval
     HashMap<String,JsonProperty> confDump = new HashMap<String,JsonProperty>();
     for(JsonProperty prop : jconf.getProperties()) {
@@ -1724,7 +1782,7 @@ public class TestConfiguration extends TestCase {
     assertEquals(false, confDump.get("test.key1").getIsFinal());
     assertEquals(fileResource1.toString(),
         confDump.get("test.key1").getResource());
-    // check if final parameter test.key2 is not changed, since it is first 
+    // check if final parameter test.key2 is not changed, since it is first
     // loaded as final parameter
     assertEquals("value2", confDump.get("test.key2").getValue());
     assertEquals(true, confDump.get("test.key2").getIsFinal());
@@ -1735,7 +1793,7 @@ public class TestConfiguration extends TestCase {
     assertEquals(false, confDump.get("test.key3").getIsFinal());
     assertEquals(fileResource.toString(),
         confDump.get("test.key3").getResource());
-    // check for resource to be "Unknown" for keys which are loaded using 'set' 
+    // check for resource to be "Unknown" for keys which are loaded using 'set'
     // and expansion of properties
     conf.set("test.key4", "value4");
     conf.set("test.key5", "value5");
@@ -1753,7 +1811,8 @@ public class TestConfiguration extends TestCase {
     assertEquals("programmatically", confDump.get("test.key4").getResource());
     outWriter.close();
   }
-  
+
+  @Test
   public void testDumpConfiguratioWithoutDefaults() throws IOException {
     // check for case when default resources are not loaded
     Configuration config = new Configuration(false);
@@ -1761,12 +1820,12 @@ public class TestConfiguration extends TestCase {
     Configuration.dumpConfiguration(config, outWriter);
     String jsonStr = outWriter.toString();
     ObjectMapper mapper = new ObjectMapper();
-    JsonConfiguration jconf = 
-      mapper.readValue(jsonStr, JsonConfiguration.class);
-    
+    JsonConfiguration jconf =
+        mapper.readValue(jsonStr, JsonConfiguration.class);
+
     //ensure that no properties are loaded.
     assertEquals(0, jconf.getProperties().length);
-    
+
     // add 2 keys
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -1776,13 +1835,13 @@ public class TestConfiguration extends TestCase {
     Path fileResource = new Path(CONFIG);
     config.addResource(fileResource);
     out.close();
-    
+
     outWriter = new StringWriter();
     Configuration.dumpConfiguration(config, outWriter);
     jsonStr = outWriter.toString();
     mapper = new ObjectMapper();
     jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
-    
+
     HashMap<String, JsonProperty>confDump = new HashMap<String, JsonProperty>();
     for (JsonProperty prop : jconf.getProperties()) {
       confDump.put(prop.getKey(), prop);
@@ -1801,6 +1860,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testDumpSensitiveProperty() throws IOException {
     final String myPassword = "ThisIsMyPassword";
     Configuration testConf = new Configuration(false);
@@ -1818,6 +1878,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testDumpSensitiveConfiguration() throws IOException {
     final String myPassword = "ThisIsMyPassword";
     Configuration testConf = new Configuration(false);
@@ -1835,6 +1896,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testGetValByRegex() {
     Configuration conf = new Configuration();
     String key1 = "t.abc.key1";
@@ -1853,10 +1915,11 @@ public class TestConfiguration extends TestCase {
     assertTrue("Picked out wrong key " + key4, !res.containsKey(key4));
   }
 
+  @Test
   public void testGetClassesShouldReturnDefaultValue() throws Exception {
     Configuration config = new Configuration();
-    Class<?>[] classes = 
-      config.getClasses("testClassName", Configuration.class);
+    Class<?>[] classes =
+        config.getClasses("testClassName", Configuration.class);
     assertEquals(
         "Not returning expected number of classes. Number of returned classes ="
             + classes.length, 1, classes.length);
@@ -1864,6 +1927,7 @@ public class TestConfiguration extends TestCase {
         classes[0]);
   }
 
+  @Test
   public void testGetClassesShouldReturnEmptyArray()
       throws Exception {
     Configuration config = new Configuration();
@@ -1873,7 +1937,8 @@ public class TestConfiguration extends TestCase {
         "Not returning expected number of classes. Number of returned classes ="
             + classes.length, 0, classes.length);
   }
-  
+
+  @Test
   public void testSettingValueNull() throws Exception {
     Configuration config = new Configuration();
     try {
@@ -1886,6 +1951,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testSettingKeyNull() throws Exception {
     Configuration config = new Configuration();
     try {
@@ -1897,6 +1963,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testInvalidSubstitution() {
     final Configuration configuration = new Configuration(false);
 
@@ -1913,6 +1980,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testIncompleteSubbing() {
     Configuration configuration = new Configuration(false);
     String key = "test.random.key";
@@ -1931,6 +1999,7 @@ public class TestConfiguration extends TestCase {
     }
   }
 
+  @Test
   public void testBoolean() {
     boolean value = true;
     Configuration configuration = new Configuration();
@@ -1938,6 +2007,7 @@ public class TestConfiguration extends TestCase {
     assertEquals(value, configuration.getBoolean("value", false));
   }
 
+  @Test
   public void testBooleanIfUnset() {
     boolean value = true;
     Configuration configuration = new Configuration();
@@ -1947,20 +2017,23 @@ public class TestConfiguration extends TestCase {
     assertEquals(value, configuration.getBoolean("value", false));
   }
 
+  @Test
   public void testFloat() {
     float value = 1.0F;
     Configuration configuration = new Configuration();
     configuration.setFloat("value", value);
-    assertEquals(value, configuration.getFloat("value", 0.0F));
+    assertEquals(value, configuration.getFloat("value", 0.0F), DOUBLE_DELTA);
   }
-  
+
+  @Test
   public void testDouble() {
     double value = 1.0D;
     Configuration configuration = new Configuration();
     configuration.setDouble("value", value);
-    assertEquals(value, configuration.getDouble("value", 0.0D));
+    assertEquals(value, configuration.getDouble("value", 0.0D), DOUBLE_DELTA);
   }
 
+  @Test
   public void testInt() {
     int value = 1;
     Configuration configuration = new Configuration();
@@ -1968,6 +2041,7 @@ public class TestConfiguration extends TestCase {
     assertEquals(value, configuration.getInt("value", 0));
   }
 
+  @Test
   public void testLong() {
     long value = 1L;
     Configuration configuration = new Configuration();
@@ -1975,16 +2049,18 @@ public class TestConfiguration extends TestCase {
     assertEquals(value, configuration.getLong("value", 0L));
   }
 
+  @Test
   public void testStrings() {
     String [] strings = {"FOO","BAR"};
     Configuration configuration = new Configuration();
     configuration.setStrings("strings", strings);
     String [] returnStrings = configuration.getStrings("strings");
     for(int i=0;i<returnStrings.length;i++) {
-       assertEquals(strings[i], returnStrings[i]);
+      assertEquals(strings[i], returnStrings[i]);
     }
   }
-  
+
+  @Test
   public void testSetPattern() {
     Pattern testPattern = Pattern.compile("a+b");
     Configuration configuration = new Configuration();
@@ -1992,13 +2068,15 @@ public class TestConfiguration extends TestCase {
     assertEquals(testPattern.pattern(),
         configuration.getPattern("testPattern", Pattern.compile("")).pattern());
   }
-  
+
+  @Test
   public void testGetClassByNameOrNull() throws Exception {
-   Configuration config = new Configuration();
-   Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
-   assertNotNull(clazz);
+    Configuration config = new Configuration();
+    Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
+    assertNotNull(clazz);
   }
 
+  @Test
   public void testGetFinalParameters() throws Exception {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -2021,6 +2099,7 @@ public class TestConfiguration extends TestCase {
    * by SPARK-2546.
    * @throws Exception
    */
+  @Test
   public void testConcurrentAccesses() throws Exception {
     out = new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -2061,6 +2140,7 @@ public class TestConfiguration extends TestCase {
     // it's expected behaviour.
   }
 
+  @Test
   public void testNullValueProperties() throws Exception {
     Configuration conf = new Configuration();
     conf.setAllowNullValueProperties(true);
@@ -2074,6 +2154,7 @@ public class TestConfiguration extends TestCase {
     assertEquals("value", conf.get("attr"));
   }
 
+  @Test
   public void testGetPasswordDeprecatedKeyStored() throws Exception {
     final String oldKey = "test.password.old.key";
     final String newKey = "test.password.new.key";
@@ -2093,14 +2174,15 @@ public class TestConfiguration extends TestCase {
 
     Configuration.addDeprecation(oldKey, newKey);
 
-    Assert.assertThat(conf.getPassword(newKey),
+    assertThat(conf.getPassword(newKey),
         CoreMatchers.is(password.toCharArray()));
-    Assert.assertThat(conf.getPassword(oldKey),
+    assertThat(conf.getPassword(oldKey),
         CoreMatchers.is(password.toCharArray()));
 
     FileUtil.fullyDelete(tmpDir);
   }
 
+  @Test
   public void testGetPasswordByDeprecatedKey() throws Exception {
     final String oldKey = "test.password.old.key";
     final String newKey = "test.password.new.key";
@@ -2120,9 +2202,9 @@ public class TestConfiguration extends TestCase {
 
     Configuration.addDeprecation(oldKey, newKey);
 
-    Assert.assertThat(conf.getPassword(newKey),
+    assertThat(conf.getPassword(newKey),
         CoreMatchers.is(password.toCharArray()));
-    Assert.assertThat(conf.getPassword(oldKey),
+    assertThat(conf.getPassword(oldKey),
         CoreMatchers.is(password.toCharArray()));
 
     FileUtil.fullyDelete(tmpDir);
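
The TestConfiguration hunks above follow the standard JUnit 3 to JUnit 4 conversion: the class no longer extends junit.framework.TestCase, each test method is marked with @Test, assertions come from a static import of org.junit.Assert, and the floating-point assertEquals calls gain an explicit tolerance (the DOUBLE_DELTA constant is assumed to be defined earlier in TestConfiguration, outside the hunks shown here). A minimal stand-alone sketch of the same pattern, with a hypothetical class and values not taken from the patch, assuming JUnit 4 and Hamcrest core on the classpath:

    import org.hamcrest.CoreMatchers;
    import org.junit.Test;
    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertThat;

    public class ExampleMigratedTest {            // was: extends TestCase
      private static final double DELTA = 1e-9;   // tolerance for float/double asserts

      @Test
      public void testFloatRoundTrip() {
        float value = 1.0F;
        // assertEquals(float, float) without a delta is deprecated in JUnit 4;
        // the three-argument overload makes the tolerance explicit.
        assertEquals(value, Float.parseFloat(Float.toString(value)), DELTA);
      }

      @Test
      public void testCharArrayMatcher() {
        char[] password = "ThisIsMyPassword".toCharArray();
        // Same shape as the Assert.assertThat -> assertThat change above;
        // CoreMatchers.is compares arrays element by element.
        assertThat(password, CoreMatchers.is("ThisIsMyPassword".toCharArray()));
      }
    }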

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationSubclass.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationSubclass.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationSubclass.java
index fd2fa38..e15e699 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationSubclass.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationSubclass.java
@@ -17,7 +17,8 @@
  */
 package org.apache.hadoop.conf;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import java.util.Properties;
 
@@ -25,11 +26,12 @@ import java.util.Properties;
  * Created 21-Jan-2009 13:42:36
  */
 
-public class TestConfigurationSubclass extends TestCase {
+public class TestConfigurationSubclass {
   private static final String EMPTY_CONFIGURATION_XML
           = "/org/apache/hadoop/conf/empty-configuration.xml";
 
 
+  @Test
   public void testGetProps() {
     SubConf conf = new SubConf(true);
     Properties properties = conf.getProperties();
@@ -37,6 +39,7 @@ public class TestConfigurationSubclass extends TestCase {
             properties.getProperty("hadoop.tmp.dir"));
   }
 
+  @Test
   public void testReload() throws Throwable {
     SubConf conf = new SubConf(true);
     assertFalse(conf.isReloaded());
@@ -45,6 +48,7 @@ public class TestConfigurationSubclass extends TestCase {
     Properties properties = conf.getProperties();
   }
 
+  @Test
   public void testReloadNotQuiet() throws Throwable {
     SubConf conf = new SubConf(true);
     conf.setQuietMode(false);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
index 167daa5..fd01650 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java
@@ -21,15 +21,14 @@ package org.apache.hadoop.conf;
 import java.io.ByteArrayOutputStream;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.junit.Test;
+import static org.junit.Assert.*;
 
-import junit.framework.TestCase;
-
-public class TestDeprecatedKeys extends TestCase {
+public class TestDeprecatedKeys {
  
   //Tests a deprecated key
+  @Test
   public void testDeprecatedKeys() throws Exception {
     Configuration conf = new Configuration();
     conf.set("topology.script.file.name", "xyz");
@@ -39,6 +38,7 @@ public class TestDeprecatedKeys extends TestCase {
   }
   
   //Tests reading / writing a conf file with deprecation after setting
+  @Test
   public void testReadWriteWithDeprecatedKeys() throws Exception {
     Configuration conf = new Configuration();
     conf.setBoolean("old.config.yet.to.be.deprecated", true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestGetInstances.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestGetInstances.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestGetInstances.java
index 57b7ff4..bc08e66 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestGetInstances.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestGetInstances.java
@@ -18,10 +18,11 @@
 package org.apache.hadoop.conf;
 
 import java.util.List;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.*;
 
-public class TestGetInstances extends TestCase {
+public class TestGetInstances {
   
   interface SampleInterface {}
   
@@ -30,7 +31,7 @@ public class TestGetInstances extends TestCase {
   static class SampleClass implements SampleInterface {
     SampleClass() {}
   }
-	
+
   static class AnotherClass implements ChildInterface {
     AnotherClass() {}
   }
@@ -39,6 +40,7 @@ public class TestGetInstances extends TestCase {
    * Makes sure <code>Configuration.getInstances()</code> returns
    * instances of the required type.
    */
+  @Test
   public void testGetInstances() throws Exception {
     Configuration conf = new Configuration();
     

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
index 4009a60..f182fe5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java
@@ -24,9 +24,10 @@ import java.io.OutputStreamWriter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.GenericTestUtils;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestAvroFSInput extends TestCase {
+public class TestAvroFSInput {
 
   private static final String INPUT_DIR = "AvroFSInput";
 
@@ -34,6 +35,7 @@ public class TestAvroFSInput extends TestCase {
     return new Path(GenericTestUtils.getTempPath(INPUT_DIR));
   }
 
+  @Test
   public void testAFSInput() throws Exception {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
index 615d0b5..a22b765 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.fs;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -29,16 +32,16 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.test.GenericTestUtils;
 
 /** This test makes sure that "DU" does not get to run on each call to getUsed */
-public class TestDU extends TestCase {
+public class TestDU {
   final static private File DU_DIR = GenericTestUtils.getTestDir("dutmp");
 
-  @Override
+  @Before
   public void setUp() {
-      FileUtil.fullyDelete(DU_DIR);
-      assertTrue(DU_DIR.mkdirs());
+    FileUtil.fullyDelete(DU_DIR);
+    assertTrue(DU_DIR.mkdirs());
   }
 
-  @Override
+  @After
   public void tearDown() throws IOException {
       FileUtil.fullyDelete(DU_DIR);
   }
@@ -69,6 +72,7 @@ public class TestDU extends TestCase {
    * @throws IOException
    * @throws InterruptedException
    */
+  @Test
   public void testDU() throws IOException, InterruptedException {
     final int writtenSize = 32*1024;   // writing 32K
     // Allow for extra 4K on-disk slack for local file systems
@@ -107,6 +111,8 @@ public class TestDU extends TestCase {
         duSize >= writtenSize &&
         writtenSize <= (duSize + slack));
   }
+
+  @Test
   public void testDUGetUsedWillNotReturnNegative() throws IOException {
     File file = new File(DU_DIR, "data");
     assertTrue(file.createNewFile());
@@ -118,6 +124,7 @@ public class TestDU extends TestCase {
     assertTrue(String.valueOf(duSize), duSize >= 0L);
   }
 
+  @Test
   public void testDUSetInitialValue() throws IOException {
     File file = new File(DU_DIR, "dataX");
     createFile(file, 8192);
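
The TestDU conversion above also shows the fixture change: the @Override setUp()/tearDown() methods inherited from TestCase become public methods annotated with @Before and @After, which JUnit 4 invokes around every @Test method. A small sketch under the same assumptions (hypothetical class and directory, not taken from the patch):

    import java.io.File;
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;
    import static org.junit.Assert.assertTrue;

    public class ExampleFixtureTest {
      private static final File WORK_DIR = new File("target", "example-tmp");

      @Before                     // replaces the JUnit 3 setUp() override
      public void setUp() {
        assertTrue(WORK_DIR.mkdirs() || WORK_DIR.isDirectory());
      }

      @After                      // replaces the JUnit 3 tearDown() override
      public void tearDown() {
        File[] children = WORK_DIR.listFiles();
        if (children != null) {
          for (File child : children) {
            assertTrue(child.delete());
          }
        }
        assertTrue(WORK_DIR.delete());
      }

      @Test
      public void testWorkDirExistsDuringTest() {
        assertTrue(WORK_DIR.isDirectory());
      }
    }

Note that JUnit 4 requires these lifecycle methods to be public, which is why several tearDown() methods later in this patch also change from protected to public.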

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java
index a2f0905..5ed743f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java
@@ -23,12 +23,12 @@ import java.lang.reflect.Modifier;
 import java.net.URI;
 import java.util.Iterator;
 
-import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.viewfs.ConfigUtil;
+import org.junit.Test;
 
-public class TestFilterFs extends TestCase {
+public class TestFilterFs {
 
   private static final Log LOG = FileSystem.LOG;
 
@@ -41,7 +41,8 @@ public class TestFilterFs extends TestCase {
       return null;
     }
   }
-  
+
+  @Test
   public void testFilterFileSystem() throws Exception {
     for (Method m : AbstractFileSystem.class.getDeclaredMethods()) {
       if (Modifier.isStatic(m.getModifiers()))
@@ -69,6 +70,7 @@ public class TestFilterFs extends TestCase {
   
   // Test that FilterFs will accept an AbstractFileSystem to be filtered which
   // has an optional authority, such as ViewFs
+  @Test
   public void testFilteringWithNonrequiredAuthority() throws Exception {
     Configuration conf = new Configuration();
     ConfigUtil.addLink(conf, "custom", "/mnt", URI.create("file:///"));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
index 87265f4..f43480e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java
@@ -22,7 +22,10 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.Random;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.GenericTestUtils;
@@ -30,7 +33,7 @@ import org.apache.hadoop.test.GenericTestUtils;
 /**
  * Testing the correctness of FileSystem.getFileBlockLocations.
  */
-public class TestGetFileBlockLocations extends TestCase {
+public class TestGetFileBlockLocations {
   private static String TEST_ROOT_DIR = GenericTestUtils.getTempPath(
       "testGetFileBlockLocations");
   private static final int FileLength = 4 * 1024 * 1024; // 4MB
@@ -39,11 +42,8 @@ public class TestGetFileBlockLocations extends TestCase {
   private FileSystem fs;
   private Random random;
 
-  /**
-   * @see TestCase#setUp()
-   */
-  @Override
-  protected void setUp() throws IOException {
+  @Before
+  public void setUp() throws IOException {
     conf = new Configuration();
     Path rootPath = new Path(TEST_ROOT_DIR);
     path = new Path(rootPath, "TestGetFileBlockLocations");
@@ -91,15 +91,14 @@ public class TestGetFileBlockLocations extends TestCase {
       assertTrue(locations.length == 0);
     }
   }
-  /**
-   * @see TestCase#tearDown()
-   */
-  @Override
-  protected void tearDown() throws IOException {
+
+  @After
+  public void tearDown() throws IOException {
     fs.delete(path, true);
     fs.close();
   }
 
+  @Test
   public void testFailureNegativeParameters() throws IOException {
     FileStatus status = fs.getFileStatus(path);
     try {
@@ -117,6 +116,7 @@ public class TestGetFileBlockLocations extends TestCase {
     }
   }
 
+  @Test
   public void testGetFileBlockLocations1() throws IOException {
     FileStatus status = fs.getFileStatus(path);
     oneTest(0, (int) status.getLen(), status);
@@ -130,6 +130,7 @@ public class TestGetFileBlockLocations extends TestCase {
     }
   }
 
+  @Test
   public void testGetFileBlockLocations2() throws IOException {
     FileStatus status = fs.getFileStatus(path);
     for (int i = 0; i < 1000; ++i) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java
index b0466b8..9d75ba0 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java
@@ -20,10 +20,12 @@ package org.apache.hadoop.fs;
 import java.io.IOException;
 import java.util.List;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestGlobExpander extends TestCase {
+public class TestGlobExpander {
 
+  @Test
   public void testExpansionIsIdentical() throws IOException {
     checkExpansionIsIdentical("");
     checkExpansionIsIdentical("/}");
@@ -35,6 +37,7 @@ public class TestGlobExpander extends TestCase {
     checkExpansionIsIdentical("p{a\\/b,c\\/d}s");
   }
 
+  @Test
   public void testExpansion() throws IOException {
     checkExpansion("{a/b}", "a/b");
     checkExpansion("/}{a/b}", "/}a/b");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
index 7a5b25e..12aed29 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
@@ -33,20 +33,21 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.TrashPolicyDefault.Emptier;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
-import org.junit.Before;
-import org.junit.Test;
 
 /**
  * This class tests commands from Trash.
  */
-public class TestTrash extends TestCase {
+public class TestTrash {
 
   private final static Path TEST_DIR = new Path(GenericTestUtils.getTempPath(
       "testTrash"));
@@ -507,19 +508,22 @@ public class TestTrash extends TestCase {
     }
   }
 
+  @Test
   public void testTrash() throws IOException {
     Configuration conf = new Configuration();
     conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
     trashShell(FileSystem.getLocal(conf), TEST_DIR);
   }
 
+  @Test
   public void testNonDefaultFS() throws IOException {
     Configuration conf = new Configuration();
     conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
     conf.set("fs.defaultFS", "invalid://host/bar/foo");
     trashNonDefaultFS(conf);
   }
-  
+
+  @Test
   public void testPluggableTrash() throws IOException {
     Configuration conf = new Configuration();
 
@@ -604,6 +608,7 @@ public class TestTrash extends TestCase {
     verifyTrashPermission(FileSystem.getLocal(conf), conf);
   }
 
+  @Test
   public void testTrashEmptier() throws Exception {
     Configuration conf = new Configuration();
     // Trash with 12 second deletes and 6 seconds checkpoints
@@ -665,12 +670,9 @@ public class TestTrash extends TestCase {
     emptierThread.interrupt();
     emptierThread.join();
   }
-  
-  /**
-   * @see TestCase#tearDown()
-   */
-  @Override
-  protected void tearDown() throws IOException {
+
+  @After
+  public void tearDown() throws IOException {
     File trashDir = new File(TEST_DIR.toUri().getPath());
     if (trashDir.exists() && !FileUtil.fullyDelete(trashDir)) {
       throw new IOException("Cannot remove data directory: " + trashDir);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java
index 41c4d47..799471b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java
@@ -20,16 +20,17 @@ package org.apache.hadoop.fs;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Test;
 
 /**
  * test for the input truncation bug when mark/reset is used.
  * HADOOP-1489
  */
-public class TestTruncatedInputBug extends TestCase {
+public class TestTruncatedInputBug {
   private static String TEST_ROOT_DIR =
       GenericTestUtils.getTestDir().getAbsolutePath();
   
@@ -49,6 +50,7 @@ public class TestTruncatedInputBug extends TestCase {
    * checksum file system currently depends on the request size
    * >= bytesPerSum to work properly.
    */
+  @Test
   public void testTruncatedInputBug() throws IOException {
     final int ioBufSize = 512;
     final int fileSize = ioBufSize*4;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java
index 6368a57..a22985d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java
@@ -21,11 +21,14 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import static org.apache.hadoop.fs.permission.FsAction.*;
 
-public class TestFsPermission extends TestCase {
+public class TestFsPermission {
+
+  @Test
   public void testFsAction() {
     //implies
     for(FsAction a : FsAction.values()) {
@@ -53,6 +56,7 @@ public class TestFsPermission extends TestCase {
    * Ensure that when manually specifying permission modes we get
    * the expected values back out for all combinations
    */
+  @Test
   public void testConvertingPermissions() {
     for(short s = 0; s <= 01777; s++) {
       assertEquals(s, new FsPermission(s).toShort());
@@ -80,6 +84,7 @@ public class TestFsPermission extends TestCase {
     assertEquals(02000, s);
   }
 
+  @Test
   public void testSpecialBitsToString() {
     for (boolean sb : new boolean[] { false, true }) {
       for (FsAction u : FsAction.values()) {
@@ -106,6 +111,7 @@ public class TestFsPermission extends TestCase {
     }
   }
 
+  @Test
   public void testFsPermission() {
     String symbolic = "-rwxrwxrwx";
 
@@ -132,6 +138,7 @@ public class TestFsPermission extends TestCase {
     }
   }
 
+  @Test
   public void testSymbolicPermission() {
     for (int i = 0; i < SYMBOLIC.length; ++i) {
       short val = 0777;
@@ -146,6 +153,7 @@ public class TestFsPermission extends TestCase {
     }
   }
 
+  @Test
   public void testUMaskParser() throws IOException {
     Configuration conf = new Configuration();
     
@@ -163,6 +171,7 @@ public class TestFsPermission extends TestCase {
     }
   }
 
+  @Test
   public void testSymbolicUmasks() {
     Configuration conf = new Configuration();
     
@@ -176,6 +185,7 @@ public class TestFsPermission extends TestCase {
     assertEquals(0111, FsPermission.getUMask(conf).toShort());
   }
 
+  @Test
   public void testBadUmasks() {
     Configuration conf = new Configuration();
     

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
index 6b1cd29..d82a2f1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestFairCallQueue.java
@@ -25,7 +25,9 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 import static org.mockito.Mockito.times;
 
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
@@ -39,13 +41,12 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.BlockingQueue;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.junit.Test;
 import org.mockito.Mockito;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
 
-public class TestFairCallQueue extends TestCase {
+public class TestFairCallQueue {
   private FairCallQueue<Schedulable> fcq;
 
   private Schedulable mockCall(String id, int priority) {
@@ -65,6 +66,7 @@ public class TestFairCallQueue extends TestCase {
   }
 
   @SuppressWarnings("deprecation")
+  @Before
   public void setUp() {
     Configuration conf = new Configuration();
     conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
@@ -74,6 +76,7 @@ public class TestFairCallQueue extends TestCase {
 
   // Validate that the total capacity of all subqueues equals
   // the maxQueueSize for different values of maxQueueSize
+  @Test
   public void testTotalCapacityOfSubQueues() {
     Configuration conf = new Configuration();
     FairCallQueue<Schedulable> fairCallQueue;
@@ -291,11 +294,12 @@ public class TestFairCallQueue extends TestCase {
 
   //
   // Ensure that FairCallQueue properly implements BlockingQueue
-  //
+  @Test
   public void testPollReturnsNullWhenEmpty() {
     assertNull(fcq.poll());
   }
 
+  @Test
   public void testPollReturnsTopCallWhenNotEmpty() {
     Schedulable call = mockCall("c");
     assertTrue(fcq.offer(call));
@@ -306,6 +310,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(0, fcq.size());
   }
 
+  @Test
   public void testOfferSucceeds() {
 
     for (int i = 0; i < 5; i++) {
@@ -316,6 +321,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(5, fcq.size());
   }
 
+  @Test
   public void testOfferFailsWhenFull() {
     for (int i = 0; i < 5; i++) { assertTrue(fcq.offer(mockCall("c"))); }
 
@@ -324,6 +330,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(5, fcq.size());
   }
 
+  @Test
   public void testOfferSucceedsWhenScheduledLowPriority() {
     // Scheduler will schedule into queue 0 x 5, then queue 1
     int mockedPriorities[] = {0, 0, 0, 0, 0, 1, 0};
@@ -334,10 +341,12 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(6, fcq.size());
   }
 
+  @Test
   public void testPeekNullWhenEmpty() {
     assertNull(fcq.peek());
   }
 
+  @Test
   public void testPeekNonDestructive() {
     Schedulable call = mockCall("c", 0);
     assertTrue(fcq.offer(call));
@@ -347,6 +356,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(1, fcq.size());
   }
 
+  @Test
   public void testPeekPointsAtHead() {
     Schedulable call = mockCall("c", 0);
     Schedulable next = mockCall("b", 0);
@@ -356,10 +366,12 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(call, fcq.peek()); // Peek points at the head
   }
 
+  @Test
   public void testPollTimeout() throws InterruptedException {
     assertNull(fcq.poll(10, TimeUnit.MILLISECONDS));
   }
 
+  @Test
   public void testPollSuccess() throws InterruptedException {
     Schedulable call = mockCall("c", 0);
     assertTrue(fcq.offer(call));
@@ -369,6 +381,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(0, fcq.size());
   }
 
+  @Test
   public void testOfferTimeout() throws InterruptedException {
     for (int i = 0; i < 5; i++) {
       assertTrue(fcq.offer(mockCall("c"), 10, TimeUnit.MILLISECONDS));
@@ -380,6 +393,7 @@ public class TestFairCallQueue extends TestCase {
   }
 
   @SuppressWarnings("deprecation")
+  @Test
   public void testDrainTo() {
     Configuration conf = new Configuration();
     conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
@@ -397,6 +411,7 @@ public class TestFairCallQueue extends TestCase {
   }
 
   @SuppressWarnings("deprecation")
+  @Test
   public void testDrainToWithLimit() {
     Configuration conf = new Configuration();
     conf.setInt("ns." + FairCallQueue.IPC_CALLQUEUE_PRIORITY_LEVELS_KEY, 2);
@@ -413,16 +428,19 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(2, fcq2.size());
   }
 
+  @Test
   public void testInitialRemainingCapacity() {
     assertEquals(10, fcq.remainingCapacity());
   }
 
+  @Test
   public void testFirstQueueFullRemainingCapacity() {
     while (fcq.offer(mockCall("c"))) ; // Queue 0 will fill up first, then queue 1
 
     assertEquals(5, fcq.remainingCapacity());
   }
 
+  @Test
   public void testAllQueuesFullRemainingCapacity() {
     int[] mockedPriorities = {0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0};
     int i = 0;
@@ -432,6 +450,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(10, fcq.size());
   }
 
+  @Test
   public void testQueuesPartialFilledRemainingCapacity() {
     int[] mockedPriorities = {0, 1, 0, 1, 0};
     for (int i = 0; i < 5; i++) { fcq.offer(mockCall("c", mockedPriorities[i])); }
@@ -555,12 +574,14 @@ public class TestFairCallQueue extends TestCase {
   }
 
   // Make sure put will overflow into lower queues when the top is full
+  @Test
   public void testPutOverflows() throws InterruptedException {
     // We can fit more than 5, even though the scheduler suggests the top queue
     assertCanPut(fcq, 8, 8);
     assertEquals(8, fcq.size());
   }
 
+  @Test
   public void testPutBlocksWhenAllFull() throws InterruptedException {
     assertCanPut(fcq, 10, 10); // Fill up
     assertEquals(10, fcq.size());
@@ -569,10 +590,12 @@ public class TestFairCallQueue extends TestCase {
     assertCanPut(fcq, 0, 1); // Will block
   }
 
+  @Test
   public void testTakeBlocksWhenEmpty() throws InterruptedException {
     assertCanTake(fcq, 0, 1);
   }
 
+  @Test
   public void testTakeRemovesCall() throws InterruptedException {
     Schedulable call = mockCall("c");
     fcq.offer(call);
@@ -581,6 +604,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(0, fcq.size());
   }
 
+  @Test
   public void testTakeTriesNextQueue() throws InterruptedException {
 
     // A mux which only draws from q 0
@@ -597,6 +621,7 @@ public class TestFairCallQueue extends TestCase {
     assertEquals(0, fcq.size());
   }
 
+  @Test
   public void testFairCallQueueMXBean() throws Exception {
     MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
     ObjectName mxbeanName = new ObjectName(
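
One detail worth noting in the TestFairCallQueue conversion: JUnit 4 calls setUp() only because of the @Before annotation; unlike JUnit 3, nothing is invoked by method name. Omitting the annotation would leave the fcq field uninitialized and the queue tests would fail with NullPointerExceptions. A hypothetical illustration of that dependency between @Before and the tests relying on it (names are made up, not from the patch):

    import java.util.ArrayDeque;
    import java.util.Queue;
    import org.junit.Before;
    import org.junit.Test;
    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertNull;

    public class ExampleQueueTest {
      private Queue<String> queue;   // initialized only by the @Before method

      @Before                        // remove this annotation and the tests below
      public void setUp() {          // throw NullPointerException instead of passing
        queue = new ArrayDeque<>();
      }

      @Test
      public void testPollReturnsNullWhenEmpty() {
        assertNull(queue.poll());
      }

      @Test
      public void testOfferThenPoll() {
        queue.offer("c");
        assertEquals("c", queue.poll());
        assertEquals(0, queue.size());
      }
    }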

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
index 9fea50e..d41a587 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
@@ -20,7 +20,8 @@ package org.apache.hadoop.log;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ContainerNode;
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.Time;
@@ -33,7 +34,6 @@ import org.apache.log4j.spi.HierarchyEventListener;
 import org.apache.log4j.spi.LoggerFactory;
 import org.apache.log4j.spi.LoggerRepository;
 import org.apache.log4j.spi.ThrowableInformation;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.io.StringWriter;
@@ -42,7 +42,7 @@ import java.net.NoRouteToHostException;
 import java.util.Enumeration;
 import java.util.Vector;
 
-public class TestLog4Json extends TestCase {
+public class TestLog4Json {
 
   private static final Log LOG = LogFactory.getLog(TestLog4Json.class);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java
index e201787..0d0d5b1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java
@@ -19,15 +19,12 @@ package org.apache.hadoop.net;
 
 import java.util.ArrayList;
 import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-
-import junit.framework.TestCase;
 import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestScriptBasedMapping extends TestCase {
-
+import org.apache.hadoop.conf.Configuration;
 
+public class TestScriptBasedMapping {
   
   public TestScriptBasedMapping() {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMappingWithDependency.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMappingWithDependency.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMappingWithDependency.java
index 77da45b..8638591 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMappingWithDependency.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMappingWithDependency.java
@@ -19,13 +19,12 @@ package org.apache.hadoop.net;
 
 import java.util.ArrayList;
 import java.util.List;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 
-import junit.framework.TestCase;
-import org.junit.Test;
-
-public class TestScriptBasedMappingWithDependency extends TestCase {
+public class TestScriptBasedMappingWithDependency {
 
   
   public TestScriptBasedMappingWithDependency() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
index 64cd9b7..9fae536 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
@@ -17,19 +17,21 @@
 package org.apache.hadoop.security;
 
 
-import junit.framework.TestCase;
+import static org.junit.Assert.*;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
+import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
 import java.util.Map;
 
-public class TestAuthenticationFilter extends TestCase {
+public class TestAuthenticationFilter {
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testConfiguration() throws Exception {
     Configuration conf = new Configuration();
     conf.set("hadoop.http.authentication.foo", "bar");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationWithProxyUserFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationWithProxyUserFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationWithProxyUserFilter.java
index 504f5a1..dac6a55 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationWithProxyUserFilter.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationWithProxyUserFilter.java
@@ -16,8 +16,8 @@
  */
 package org.apache.hadoop.security;
 
-
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.FilterContainer;
@@ -30,9 +30,10 @@ import java.util.Map;
  * This class is tested for {@link AuthenticationWithProxyUserFilter}
  * to verify configurations of this filter.
  */
-public class TestAuthenticationWithProxyUserFilter extends TestCase {
+public class TestAuthenticationWithProxyUserFilter {
 
   @SuppressWarnings("unchecked")
+  @Test
   public void testConfiguration() throws Exception {
     Configuration conf = new Configuration();
     conf.set("hadoop.http.authentication.foo", "bar");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
index 684ef3b..03fc4cb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
@@ -21,17 +21,18 @@ import java.io.IOException;
 import java.net.InetAddress;
 import java.util.Map;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.WhitelistBasedResolver;
 import org.apache.hadoop.util.TestFileBasedIPList;
 
-public class TestWhitelistBasedResolver extends TestCase {
+public class TestWhitelistBasedResolver {
 
   public static final Map<String, String> SASL_PRIVACY_PROPS =
     WhitelistBasedResolver.getSaslProperties(new Configuration());
 
+  @Test
   public void testFixedVariableAndLocalWhiteList() throws IOException {
 
     String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};
@@ -79,6 +80,7 @@ public class TestWhitelistBasedResolver extends TestCase {
    * Check  for inclusion in whitelist
    * Check for exclusion from whitelist
    */
+  @Test
   public void testFixedAndLocalWhiteList() throws IOException {
 
     String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};
@@ -128,6 +130,7 @@ public class TestWhitelistBasedResolver extends TestCase {
    * Add a bunch of subnets and IPSs to the whitelist
    * Check  for inclusion in whitelist with a null value
    */
+  @Test
   public void testNullIPAddress() throws IOException {
 
     String[] fixedIps = {"10.119.103.112", "10.221.102.0/23"};

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java
index 1741eb7..f6e5133 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java
@@ -25,11 +25,12 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenSecretManager;
+import org.junit.Test;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.*;
 
 /** Unit tests for Token */
-public class TestToken extends TestCase {
+public class TestToken {
 
   static boolean isEqual(Object a, Object b) {
     return a == null ? b == null : a.equals(b);
@@ -45,6 +46,7 @@ public class TestToken extends TestCase {
   /**
    * Test token serialization
    */
+  @Test
   public void testTokenSerialization() throws IOException {
     // Get a token
     Token<TokenIdentifier> sourceToken = new Token<TokenIdentifier>();
@@ -76,7 +78,8 @@ public class TestToken extends TestCase {
     }
   }
 
-  public static void testEncodeWritable() throws Exception {
+  @Test
+  public void testEncodeWritable() throws Exception {
     String[] values = new String[]{"", "a", "bb", "ccc", "dddd", "eeeee",
         "ffffff", "ggggggg", "hhhhhhhh", "iiiiiiiii",
         "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLM" +
@@ -96,7 +99,8 @@ public class TestToken extends TestCase {
       checkUrlSafe(encode);
     }
   }
-  
+
+  @Test
   public void testDecodeIdentifier() throws IOException {
     TestDelegationTokenSecretManager secretManager =
       new TestDelegationTokenSecretManager(0, 0, 0, 0);
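
Note the extra change in testEncodeWritable above: besides gaining @Test, the method loses its static modifier. JUnit 4's default runner requires @Test methods to be public, non-static, and void, and reports a validation error for static ones, so the method has to become an instance method in order to run. A small sketch of the rule, using a hypothetical class and a stand-in method body rather than the real encode/decode checks:

    import static org.junit.Assert.assertEquals;
    import org.junit.Test;

    public class EncodingTest {            // hypothetical example, not part of the patch
      // Must be a public, non-static, void method; a static @Test method fails validation.
      @Test
      public void testRoundTrip() throws Exception {
        String original = "hello";
        String decoded = new String(original.getBytes("UTF-8"), "UTF-8");
        assertEquals(original, decoded);   // stand-in assertion for the token encode/decode tests
      }
    }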

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b7cbe38/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
index 58935f2..f36c586 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java
@@ -17,17 +17,15 @@
  */
 package org.apache.hadoop.util;
 
-import junit.framework.TestCase;
-
-import org.apache.hadoop.util.AsyncDiskService;
 import org.junit.Test;
+import static org.junit.Assert.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * A test for AsyncDiskService.
  */
-public class TestAsyncDiskService extends TestCase {
+public class TestAsyncDiskService {
   
   public static final Logger LOG =
       LoggerFactory.getLogger(TestAsyncDiskService.class);

