Posted to commits@geode.apache.org by hi...@apache.org on 2016/06/15 16:42:04 UTC

[23/97] [abbrv] [partial] incubator-geode git commit: GEODE-837: update tests from JUnit3 to JUnit4
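The conversion pattern repeated across the files in this commit is the standard JUnit3-to-JUnit4 migration: drop junit.framework.TestCase (or swap the base class for a JUnit4-aware one), delete the String-name constructor, and let annotations replace the "test" naming convention. A minimal sketch of that pattern, using the same category import the diffs below use; the class name is hypothetical:

import static org.junit.Assert.*;

import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.gemstone.gemfire.test.junit.categories.IntegrationTest;

@Category(IntegrationTest.class)        // category marker replaces ad-hoc suite grouping
public class ExampleMigratedJUnitTest { // hypothetical name; no longer extends junit.framework.TestCase

  @Before
  public void setUp() throws Exception {
    // per-test fixture setup, found by annotation rather than by overriding TestCase.setUp()
  }

  @Test
  public void testSomething() throws Exception {
    // found by @Test, not by the "test" name prefix; checked exceptions simply propagate
    assertTrue(true);
  }
}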

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
index 502fa1d..a18ec3e 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
@@ -16,6 +16,16 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.Properties;
+
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
 import com.gemstone.gemfire.cache.Cache;
 import com.gemstone.gemfire.cache.CacheFactory;
 import com.gemstone.gemfire.distributed.DistributedSystem;
@@ -24,23 +34,21 @@ import com.gemstone.gemfire.test.dunit.ThreadUtils;
 import com.gemstone.gemfire.test.dunit.Wait;
 import com.gemstone.gemfire.test.dunit.WaitCriterion;
 import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
 
-import java.util.Properties;
+@Category(IntegrationTest.class)
+public class BlockingHARegionJUnitTest {
 
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
-import static org.junit.Assert.*;
+  private static Cache cache = null;
 
-@Category(IntegrationTest.class)
-public class BlockingHARegionJUnitTest
-{
-  static Cache cache = null;
+  /** boolean to record an exception occurrence in another thread **/
+  private static volatile boolean exceptionOccured = false;
+  /** StringBuffer to store the exception**/
+  private static StringBuffer exceptionString = new StringBuffer();
+  /** boolean to quit the for loop **/
+  private static volatile boolean quitForLoop = false;
 
   @Before
-  public void setUp() throws Exception
-  {
+  public void setUp() throws Exception {
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     if (cache != null) {
@@ -50,43 +58,34 @@ public class BlockingHARegionJUnitTest
       .connect(props));
   }
 
-/**
- * This test has a scenario where the HAReqionQueue capacity is just 1. There will
- * be two thread. One doing a 1000 puts and the other doing a 1000 takes. The validation
- * for this test is that it should not encounter any exceptions
- *
- */
+  /**
+   * This test has a scenario where the HARegionQueue capacity is just 1. There will
+   * be two threads: one doing 1000 puts and the other doing 1000 takes. The validation
+   * for this test is that it should not encounter any exceptions.
+   */
   @Test
-  public void testBoundedPuts()
-  {
-    try {
-      exceptionOccured = false;
-      HARegionQueueAttributes harqa = new HARegionQueueAttributes();
-      harqa.setBlockingQueueCapacity(1);
-      HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
-          "BlockingHARegionJUnitTest_Region", cache, harqa,
-          HARegionQueue.BLOCKING_HA_QUEUE, false);
-      hrq.setPrimary(true);//fix for 40314 - capacity constraint is checked for primary only.
-      Thread thread1 = new DoPuts(hrq,1000);
-      Thread thread2 = new DoTake(hrq,1000);
-
-      thread1.start();
-      thread2.start();
+  public void testBoundedPuts() throws Exception {
+    exceptionOccured = false;
+    HARegionQueueAttributes harqa = new HARegionQueueAttributes();
+    harqa.setBlockingQueueCapacity(1);
+    HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
+        "BlockingHARegionJUnitTest_Region", cache, harqa,
+        HARegionQueue.BLOCKING_HA_QUEUE, false);
+    hrq.setPrimary(true);//fix for 40314 - capacity constraint is checked for primary only.
+    Thread thread1 = new DoPuts(hrq,1000);
+    Thread thread2 = new DoTake(hrq,1000);
 
-      ThreadUtils.join(thread1, 30 * 1000);
-      ThreadUtils.join(thread2, 30 * 1000);
+    thread1.start();
+    thread2.start();
 
-      if (exceptionOccured) {
-        fail(" Test failed due to " + exceptionString);
-      }
-      
-      cache.close();
+    ThreadUtils.join(thread1, 30 * 1000);
+    ThreadUtils.join(thread2, 30 * 1000);
 
-    }
-    catch (Exception e) {
-      fail(" Test encountered an exception "+e);
+    if (exceptionOccured) {
+      fail(" Test failed due to " + exceptionString);
     }
 
+    cache.close();
   }
 
   /**
@@ -97,67 +96,60 @@ public class BlockingHARegionJUnitTest
    * is started. This will cause the region size to come down by one and the put thread waiting
    * will go ahead and do the put. The thread should then die and the region size should be validated
    * to reflect that.
-   *
    */
   @Test
-  public void testPutBeingBlocked()
-  {
-    try {
-      exceptionOccured = false;
-      quitForLoop = false;
-      HARegionQueueAttributes harqa = new HARegionQueueAttributes();
-      harqa.setBlockingQueueCapacity(1);
-      final HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
-          "BlockingHARegionJUnitTest_Region", cache, harqa,
-          HARegionQueue.BLOCKING_HA_QUEUE, false);
-      hrq.setPrimary(true);//fix for 40314 - capacity constraint is checked for primary only.
-      final Thread thread1 = new DoPuts(hrq,2);
-      thread1.start();
-      WaitCriterion ev = new WaitCriterion() {
-        public boolean done() {
-          return hrq.region.size() == 2;
-        }
-        public String description() {
-          return null;
-        }
-      };
-      Wait.waitForCriterion(ev, 1000, 200, true);
-      assertTrue(thread1.isAlive()); //thread should still be alive (in wait state)
-      
-      Thread thread2 = new DoTake(hrq,1);
-      thread2.start(); //start take thread
-      ev = new WaitCriterion() {
-        public boolean done() {
-          return hrq.region.size() == 3;
-        }
-        public String description() {
-          return null;
-        }
-      };
-      //sleep. take will proceed and so will sleeping put
-      Wait.waitForCriterion(ev, 3 * 1000, 200, true);
+  public void testPutBeingBlocked() throws Exception {
+    exceptionOccured = false;
+    quitForLoop = false;
+    HARegionQueueAttributes harqa = new HARegionQueueAttributes();
+    harqa.setBlockingQueueCapacity(1);
+    final HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
+        "BlockingHARegionJUnitTest_Region", cache, harqa,
+        HARegionQueue.BLOCKING_HA_QUEUE, false);
+    hrq.setPrimary(true);//fix for 40314 - capacity constraint is checked for primary only.
+    final Thread thread1 = new DoPuts(hrq,2);
+    thread1.start();
+    WaitCriterion ev = new WaitCriterion() {
+      public boolean done() {
+        return hrq.region.size() == 2;
+      }
+      public String description() {
+        return null;
+      }
+    };
+    Wait.waitForCriterion(ev, 1000, 200, true);
+    assertTrue(thread1.isAlive()); //thread should still be alive (in wait state)
 
-      // thread should have died since put should have proceeded
-      ev = new WaitCriterion() {
-        public boolean done() {
-          return !thread1.isAlive();
-        }
-        public String description() {
-          return "thread1 still alive";
-        }
-      };
-      Wait.waitForCriterion(ev, 30 * 1000, 1000, true);
-      
-      ThreadUtils.join(thread1, 30 * 1000); // for completeness
-      ThreadUtils.join(thread2, 30 * 1000);
-      if (exceptionOccured) {
-        fail(" Test failed due to " + exceptionString);
+    Thread thread2 = new DoTake(hrq,1);
+    thread2.start(); //start take thread
+    ev = new WaitCriterion() {
+      public boolean done() {
+        return hrq.region.size() == 3;
       }
-      cache.close();
-    }
-    catch (Exception e) {
-      fail(" Test encountered an exception "+e);
+      public String description() {
+        return null;
+      }
+    };
+    //sleep. take will proceed and so will sleeping put
+    Wait.waitForCriterion(ev, 3 * 1000, 200, true);
+
+    // thread should have died since put should have proceeded
+    ev = new WaitCriterion() {
+      public boolean done() {
+        return !thread1.isAlive();
+      }
+      public String description() {
+        return "thread1 still alive";
+      }
+    };
+    Wait.waitForCriterion(ev, 30 * 1000, 1000, true);
+
+    ThreadUtils.join(thread1, 30 * 1000); // for completeness
+    ThreadUtils.join(thread2, 30 * 1000);
+    if (exceptionOccured) {
+      fail(" Test failed due to " + exceptionString);
     }
+    cache.close();
   }
 
   
@@ -167,72 +159,65 @@ public class BlockingHARegionJUnitTest
    * put simultaneously. They will reach a state where the queue is full and they will all
    * go in a wait state. the region size would be verified to be 20000 (10000 puts and 10000 DACE objects).
    * then the threads are interrupted and made to quit the loop
-   *
    */
   @Test
-  public void testConcurrentPutsNotExceedingLimit()
-  {
-    try {
-      exceptionOccured = false;
-      quitForLoop = false;
-      HARegionQueueAttributes harqa = new HARegionQueueAttributes();
-      harqa.setBlockingQueueCapacity(10000);
-      final HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
-          "BlockingHARegionJUnitTest_Region", cache, harqa,
-          HARegionQueue.BLOCKING_HA_QUEUE, false);      
-      hrq.setPrimary(true);//fix for 40314 - capacity constraint is checked for primary only.
-      Thread thread1 = new DoPuts(hrq,20000,1);
-      Thread thread2 = new DoPuts(hrq,20000,2);
-      Thread thread3 = new DoPuts(hrq,20000,3);
-      Thread thread4 = new DoPuts(hrq,20000,4);
-      Thread thread5 = new DoPuts(hrq,20000,5);
-      
-      thread1.start();
-      thread2.start();
-      thread3.start();
-      thread4.start();
-      thread5.start();
-      
-      WaitCriterion ev = new WaitCriterion() {
-        public boolean done() {
-          return hrq.region.size() == 20000;
-        }
-        public String description() {
-          return null;
-        }
-      };
-      Wait.waitForCriterion(ev, 30 * 1000, 200, true);
-      
-      assertTrue(thread1.isAlive());
-      assertTrue(thread2.isAlive());
-      assertTrue(thread3.isAlive());
-      assertTrue(thread4.isAlive());
-      assertTrue(thread5.isAlive());
-      
-      assertTrue(hrq.region.size()==20000);
-      
-      quitForLoop = true;
-      Thread.sleep(20000);
-      
-      thread1.interrupt();
-      thread2.interrupt();
-      thread3.interrupt();
-      thread4.interrupt();
-      thread5.interrupt();
-      
-      Thread.sleep(2000);
-      
-      ThreadUtils.join(thread1, 5 * 60 * 1000);
-      ThreadUtils.join(thread2, 5 * 60 * 1000);
-      ThreadUtils.join(thread3, 5 * 60 * 1000);
-      ThreadUtils.join(thread4, 5 * 60 * 1000);
-      ThreadUtils.join(thread5, 5 * 60 * 1000);
-      
-      cache.close();
-    }
-    catch (Exception e) {
-      fail(" Test encountered an exception "+e);
-    }
+  public void testConcurrentPutsNotExceedingLimit() throws Exception {
+    exceptionOccured = false;
+    quitForLoop = false;
+    HARegionQueueAttributes harqa = new HARegionQueueAttributes();
+    harqa.setBlockingQueueCapacity(10000);
+    final HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
+        "BlockingHARegionJUnitTest_Region", cache, harqa,
+        HARegionQueue.BLOCKING_HA_QUEUE, false);
+    hrq.setPrimary(true);//fix for 40314 - capacity constraint is checked for primary only.
+    Thread thread1 = new DoPuts(hrq,20000,1);
+    Thread thread2 = new DoPuts(hrq,20000,2);
+    Thread thread3 = new DoPuts(hrq,20000,3);
+    Thread thread4 = new DoPuts(hrq,20000,4);
+    Thread thread5 = new DoPuts(hrq,20000,5);
+
+    thread1.start();
+    thread2.start();
+    thread3.start();
+    thread4.start();
+    thread5.start();
+
+    WaitCriterion ev = new WaitCriterion() {
+      public boolean done() {
+        return hrq.region.size() == 20000;
+      }
+      public String description() {
+        return null;
+      }
+    };
+    Wait.waitForCriterion(ev, 30 * 1000, 200, true);
+
+    assertTrue(thread1.isAlive());
+    assertTrue(thread2.isAlive());
+    assertTrue(thread3.isAlive());
+    assertTrue(thread4.isAlive());
+    assertTrue(thread5.isAlive());
+
+    assertTrue(hrq.region.size()==20000);
+
+    quitForLoop = true;
+    Thread.sleep(20000);
+
+    thread1.interrupt();
+    thread2.interrupt();
+    thread3.interrupt();
+    thread4.interrupt();
+    thread5.interrupt();
+
+    Thread.sleep(2000);
+
+    ThreadUtils.join(thread1, 5 * 60 * 1000);
+    ThreadUtils.join(thread2, 5 * 60 * 1000);
+    ThreadUtils.join(thread3, 5 * 60 * 1000);
+    ThreadUtils.join(thread4, 5 * 60 * 1000);
+    ThreadUtils.join(thread5, 5 * 60 * 1000);
+
+    cache.close();
   }
   
   /**
@@ -241,95 +226,87 @@ public class BlockingHARegionJUnitTest
    * put simultaneously. They will reach a state where the queue is full and they will all
    * go in a wait state. the region size would be verified to be 20000 (10000 puts and 10000 DACE objects).
    * then the threads are interrupted and made to quit the loop
-   *
-   *TODO:
-   *
    */
-  public void _testConcurrentPutsTakesNotExceedingLimit()
-  {
-    try {
-      exceptionOccured = false;
-      quitForLoop = false;
-      HARegionQueueAttributes harqa = new HARegionQueueAttributes();
-      harqa.setBlockingQueueCapacity(10000);
-      final HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
-          "BlockingHARegionJUnitTest_Region", cache, harqa,
-          HARegionQueue.BLOCKING_HA_QUEUE, false);
-      Thread thread1 = new DoPuts(hrq,40000,1);
-      Thread thread2 = new DoPuts(hrq,40000,2);
-      Thread thread3 = new DoPuts(hrq,40000,3);
-      Thread thread4 = new DoPuts(hrq,40000,4);
-      Thread thread5 = new DoPuts(hrq,40000,5);
-      
-      Thread thread6 = new DoTake(hrq,5000);
-      Thread thread7 = new DoTake(hrq,5000);
-      Thread thread8 = new DoTake(hrq,5000);
-      Thread thread9 = new DoTake(hrq,5000);
-      Thread thread10 = new DoTake(hrq,5000);
-      
-      thread1.start();
-      thread2.start();
-      thread3.start();
-      thread4.start();
-      thread5.start();
-      
-      thread6.start();
-      thread7.start();
-      thread8.start();
-      thread9.start();
-      thread10.start();
-      
-      ThreadUtils.join(thread6, 30 * 1000);
-      ThreadUtils.join(thread7, 30 * 1000);
-      ThreadUtils.join(thread8, 30 * 1000);
-      ThreadUtils.join(thread9, 30 * 1000);
-      ThreadUtils.join(thread10, 30 * 1000);
-      
-      WaitCriterion ev = new WaitCriterion() {
-        public boolean done() {
-          return hrq.region.size() == 20000;  
-        }
-        public String description() {
-          return null;
-        }
-      };
-      Wait.waitForCriterion(ev, 30 * 1000, 200, true);
-      
-      assertTrue(thread1.isAlive());
-      assertTrue(thread2.isAlive());
-      assertTrue(thread3.isAlive());
-      assertTrue(thread4.isAlive());
-      assertTrue(thread5.isAlive());
-      
-      assertTrue(hrq.region.size()==20000);
-      
-      quitForLoop = true;
-      
-      Thread.sleep(2000);
-      
-      thread1.interrupt();
-      thread2.interrupt();
-      thread3.interrupt();
-      thread4.interrupt();
-      thread5.interrupt();
-      
-      Thread.sleep(2000);
-      
-      
-      ThreadUtils.join(thread1, 30 * 1000);
-      ThreadUtils.join(thread2, 30 * 1000);
-      ThreadUtils.join(thread3, 30 * 1000);
-      ThreadUtils.join(thread4, 30 * 1000);
-      ThreadUtils.join(thread5, 30 * 1000);
-      
-      cache.close();
-    }
-    catch (Exception e) {
-      fail(" Test encountered an exception "+e);
-    }
+  @Ignore("TODO: test is disabled")
+  @Test
+  public void testConcurrentPutsTakesNotExceedingLimit() throws Exception {
+    exceptionOccured = false;
+    quitForLoop = false;
+    HARegionQueueAttributes harqa = new HARegionQueueAttributes();
+    harqa.setBlockingQueueCapacity(10000);
+    final HARegionQueue hrq = HARegionQueue.getHARegionQueueInstance(
+        "BlockingHARegionJUnitTest_Region", cache, harqa,
+        HARegionQueue.BLOCKING_HA_QUEUE, false);
+    Thread thread1 = new DoPuts(hrq,40000,1);
+    Thread thread2 = new DoPuts(hrq,40000,2);
+    Thread thread3 = new DoPuts(hrq,40000,3);
+    Thread thread4 = new DoPuts(hrq,40000,4);
+    Thread thread5 = new DoPuts(hrq,40000,5);
+
+    Thread thread6 = new DoTake(hrq,5000);
+    Thread thread7 = new DoTake(hrq,5000);
+    Thread thread8 = new DoTake(hrq,5000);
+    Thread thread9 = new DoTake(hrq,5000);
+    Thread thread10 = new DoTake(hrq,5000);
+
+    thread1.start();
+    thread2.start();
+    thread3.start();
+    thread4.start();
+    thread5.start();
+
+    thread6.start();
+    thread7.start();
+    thread8.start();
+    thread9.start();
+    thread10.start();
+
+    ThreadUtils.join(thread6, 30 * 1000);
+    ThreadUtils.join(thread7, 30 * 1000);
+    ThreadUtils.join(thread8, 30 * 1000);
+    ThreadUtils.join(thread9, 30 * 1000);
+    ThreadUtils.join(thread10, 30 * 1000);
+
+    WaitCriterion ev = new WaitCriterion() {
+      public boolean done() {
+        return hrq.region.size() == 20000;
+      }
+      public String description() {
+        return null;
+      }
+    };
+    Wait.waitForCriterion(ev, 30 * 1000, 200, true);
+
+    assertTrue(thread1.isAlive());
+    assertTrue(thread2.isAlive());
+    assertTrue(thread3.isAlive());
+    assertTrue(thread4.isAlive());
+    assertTrue(thread5.isAlive());
+
+    assertTrue(hrq.region.size()==20000);
+
+    quitForLoop = true;
+
+    Thread.sleep(2000);
+
+    thread1.interrupt();
+    thread2.interrupt();
+    thread3.interrupt();
+    thread4.interrupt();
+    thread5.interrupt();
+
+    Thread.sleep(2000);
+
+
+    ThreadUtils.join(thread1, 30 * 1000);
+    ThreadUtils.join(thread2, 30 * 1000);
+    ThreadUtils.join(thread3, 30 * 1000);
+    ThreadUtils.join(thread4, 30 * 1000);
+    ThreadUtils.join(thread5, 30 * 1000);
+
+    cache.close();
   }
-  
-  
+
   /**
    * Tests the bug in HARegionQueue where the take side put permit is not being
    * incremented   when the event arriving at the queue which has optimistically
@@ -337,11 +314,9 @@ public class BlockingHARegionJUnitTest
    * has a sequence ID less than the last dispatched sequence ID. This event is
    * rightly rejected from entering the queue but the take permit also needs to
    * increase & a notify issued  
-   * 
-   */  
+   */
   @Test
-  public void testHARQMaxCapacity_Bug37627()
-  {
+  public void testHARQMaxCapacity_Bug37627() throws Exception {
     try {
       exceptionOccured = false;
       quitForLoop = false;
@@ -379,50 +354,39 @@ public class BlockingHARegionJUnitTest
         fail(" Test failed due to " + exceptionString);
       }
     }
-    catch (Exception e) {
-      fail(" Test failed due to " + e);
-    }
     finally {
       if (cache != null) {
         cache.close();
       }
     }
-
   }
   
-  
-
-  /** boolean to record an exception occurence in another thread**/
-  static volatile boolean exceptionOccured = false;
-/** StringBuffer to store the exception**/
-  static StringBuffer exceptionString = new StringBuffer();
-  /** boolen to quit the for loop**/
-  static volatile boolean quitForLoop = false;
-
   /**
    * class which does specified number of puts on the queue
-   *
    */
-  static class DoPuts extends Thread
-  {
+  private static class DoPuts extends Thread {
+
     HARegionQueue regionQueue = null;
     final int numberOfPuts;
+
     DoPuts(HARegionQueue haRegionQueue, int numberOfPuts) {
       this.regionQueue = haRegionQueue;
       this.numberOfPuts = numberOfPuts;
     }
-/**
- * region id can be specified to generate Thread unique events
- */
+
+    /**
+     * region id can be specified to generate Thread unique events
+     */
     int regionId = 0;
+
     DoPuts(HARegionQueue haRegionQueue, int numberOfPuts, int regionId) {
       this.regionQueue = haRegionQueue;
       this.numberOfPuts = numberOfPuts;
       this.regionId = regionId;
     }
-    
-    public void run()
-    {
+
+    @Override
+    public void run() {
       for (int i = 0; i < numberOfPuts; i++) {
         try {
           this.regionQueue.put(new ConflatableObject("" + i, "" + i,
@@ -445,10 +409,9 @@ public class BlockingHARegionJUnitTest
 
   /**
    * class which does a specified number of takes
-   *
    */
-  static class DoTake extends Thread
-  {
+  private static class DoTake extends Thread {
+
     final HARegionQueue regionQueue;
     final int numberOfTakes;
 
@@ -457,8 +420,8 @@ public class BlockingHARegionJUnitTest
       this.numberOfTakes = numberOfTakes;
     }
 
-    public void run()
-    {
+    @Override
+    public void run() {
       for (int i = 0; i < numberOfTakes; i++) {
         try {
           assertNotNull(this.regionQueue.take());
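Two smaller changes in BlockingHARegionJUnitTest above are part of the same migration: a test that JUnit3 disabled by renaming it with a leading underscore (so the runner's "test" prefix no longer matched) now keeps its real name and is skipped with @Ignore, and the try/catch blocks that collapsed any exception into fail(e) are replaced by a plain throws Exception, which preserves the original stack trace. A minimal sketch, with a hypothetical class name:

import org.junit.Ignore;
import org.junit.Test;

public class DisabledTestExample { // hypothetical name

  // JUnit3 style: renaming to _testSomething() silently hid the method from the runner.
  // JUnit4 style: the method keeps its name and shows up as skipped in the test report.
  @Ignore("TODO: test is disabled")
  @Test
  public void testSomething() throws Exception {
    // body unchanged; an unexpected exception now fails the test with its full stack trace
  }
}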

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
index a48b949..4f76c7a 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
@@ -16,7 +16,22 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
-import com.gemstone.gemfire.cache.*;
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache.server.CacheServer;
 import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
 import com.gemstone.gemfire.cache30.CacheTestCase;
@@ -24,13 +39,13 @@ import com.gemstone.gemfire.cache30.ClientServerTestCase;
 import com.gemstone.gemfire.distributed.DistributedSystem;
 import com.gemstone.gemfire.internal.AvailablePort;
 import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
-import com.gemstone.gemfire.test.dunit.*;
-import junit.framework.Assert;
-
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.IgnoredException;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.cache.internal.JUnit4CacheTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
 /**
  * This is a bug test for 36853 (Expiry logic in HA is used to expire early data
@@ -44,29 +59,27 @@ import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties
  * is set for delayed start. This will make some of the events in the queue
  * expire before dispatcher can start picking them up for delivery to the
  * client.
- * 
- * 
  */
-public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
-{
+@Category(DistributedTest.class)
+public class Bug36853EventsExpiryDUnitTest extends JUnit4CacheTestCase {
 
   /** Cache-server */
-  VM server = null;
+  private VM server = null;
 
   /** Client , connected to Cache-server */
-  VM client = null;
+  private VM client = null;
 
   /** Name of the test region */
-  private static final String REGION_NAME = "Bug36853EventsExpiryDUnitTest_region";
+  private static final String REGION_NAME = Bug36853EventsExpiryDUnitTest.class.getSimpleName() + "_region";
 
   /** The cache instance for test cases */
-  protected static Cache cache = null;
+  private static Cache cache = null;
 
   /** Boolean to indicate the client to proceed for validation */
-  protected static volatile boolean proceedForValidation = false;
+  private static volatile boolean proceedForValidation = false;
 
   /** Counter to indicate number of puts recieved by client */
-  protected static volatile int putsRecievedByClient;
+  private static volatile int putsRecievedByClient;
 
   /** The last key for operations, to notify for proceeding to validation */
   private static final String LAST_KEY = "LAST_KEY";
@@ -77,21 +90,6 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
   /** Number of puts done for the test */
   private static final int TOTAL_PUTS = 5;
 
-  /**
-   * Constructor
-   * 
-   * @param name
-   */
-  public Bug36853EventsExpiryDUnitTest(String name) {
-    super(name);
-  }
-
-  /**
-   * Sets up the cache-server and client for the test
-   * 
-   * @throws Exception -
-   *           thrown in any problem occurs in setUp
-   */
   @Override
   public final void preSetUp() throws Exception {
     disconnectAllFromDS();
@@ -126,12 +124,12 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
   /**
    * Creates cache and starts the bridge-server
    */
-  public static Integer createServerCache() throws Exception
+  private static Integer createServerCache() throws Exception
   {
     System.setProperty(HARegionQueue.REGION_ENTRY_EXPIRY_TIME, "1");
     System.setProperty("slowStartTimeForTesting", String
         .valueOf(DISPATCHER_SLOWSTART_TIME));
-    new Bug36853EventsExpiryDUnitTest("temp").createCache(new Properties());
+    new Bug36853EventsExpiryDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setDataPolicy(DataPolicy.REPLICATE);
@@ -156,12 +154,12 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
    * @throws Exception -
    *           thrown if any problem occurs in setting up the client
    */
-  public static void createClientCache(String hostName, Integer port)
+  private static void createClientCache(String hostName, Integer port)
     throws Exception {
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new Bug36853EventsExpiryDUnitTest("temp").createCache(props);
+    new Bug36853EventsExpiryDUnitTest().createCache(props);
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     ClientServerTestCase.configureConnectionPool(factory, hostName, port.intValue(),-1, true, -1, 2, null);
@@ -203,7 +201,7 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
    * @throws Exception -
    *           thrown if any problem occurs in put operation
    */
-  public static void generateEvents() throws Exception
+  private static void generateEvents() throws Exception
   {
     String regionName = Region.SEPARATOR + REGION_NAME;
     Region region = cache.getRegion(regionName);
@@ -230,6 +228,7 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
    * @throws Exception -
    *           thrown if any exception occurs in test
    */
+  @Test
   public void testEventsExpiryBug() throws Exception
   {
     IgnoredException.addIgnoredException("Unexpected IOException");
@@ -242,7 +241,7 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
    * Waits for the listener to receive all events and validates that no
    * exception occured in client
    */
-  public static void validateEventCountAtClient() throws Exception
+  private static void validateEventCountAtClient() throws Exception
   {
     if (!proceedForValidation) {
       synchronized (Bug36853EventsExpiryDUnitTest.class) {
@@ -258,7 +257,7 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
       }
     }
     LogWriterUtils.getLogWriter().info("Starting validation on client2");
-    Assert.assertEquals(
+    assertEquals(
         "Puts recieved by client not equal to the puts done at server.",
         TOTAL_PUTS, putsRecievedByClient);
     LogWriterUtils.getLogWriter()
@@ -271,7 +270,7 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
    * Closes the cache
    * 
    */
-  public static void unSetExpiryTimeAndCloseCache()
+  private static void unSetExpiryTimeAndCloseCache()
   {    
     System.clearProperty(HARegionQueue.REGION_ENTRY_EXPIRY_TIME);
     CacheTestCase.closeCache();
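The DUnit conversions in the rest of this message follow the same shape: the test extends JUnit4CacheTestCase (or JUnit4DistributedTestCase) instead of CacheTestCase/DistributedTestCase, carries @Category(DistributedTest.class), annotates its test methods with @Test, and loses the String-name constructor, so static helpers that used to write new Foo("temp") now use the no-arg constructor. A minimal sketch of that skeleton; the class name and the createCache helper are placeholders for test-specific code not shown in the hunks:

import java.util.Properties;

import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.gemstone.gemfire.test.dunit.cache.internal.JUnit4CacheTestCase;
import com.gemstone.gemfire.test.junit.categories.DistributedTest;

@Category(DistributedTest.class)
public class ExampleExpiryDUnitTest extends JUnit4CacheTestCase { // hypothetical name

  // helpers invoked in remote VMs used to call new ExampleExpiryDUnitTest("temp");
  // with the String constructor gone they use the no-arg constructor instead
  private static void createServerCache() throws Exception {
    new ExampleExpiryDUnitTest().createCache(new Properties());
  }

  private void createCache(Properties props) throws Exception {
    // placeholder for the test-specific cache creation seen in the real classes
  }

  @Test // test methods in DUnit subclasses must now be annotated as well
  public void testEventsExpiry() throws Exception {
    // ...
  }
}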

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
index 15094bb..70bd8b4 100644
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
@@ -17,8 +17,20 @@
 package com.gemstone.gemfire.internal.cache.ha;
 
 import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
 
-import com.gemstone.gemfire.cache.*;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
 import com.gemstone.gemfire.cache.client.ClientCacheFactory;
 import com.gemstone.gemfire.cache.client.ClientRegionFactory;
 import com.gemstone.gemfire.cache.client.ClientRegionShortcut;
@@ -30,31 +42,28 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
 import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientNotifier;
 import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
 import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
-import com.gemstone.gemfire.test.dunit.*;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
-
-public class Bug48571DUnitTest extends DistributedTestCase {
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.IgnoredException;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.Wait;
+import com.gemstone.gemfire.test.dunit.WaitCriterion;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
+
+@Category(DistributedTest.class)
+public class Bug48571DUnitTest extends JUnit4DistributedTestCase {
 
   private static VM server = null;
   private VM client = null;
   private static GemFireCacheImpl cache = null;
   
-  private static final String region = "Bug48571DUnitTest_region";
+  private static final String region = Bug48571DUnitTest.class.getSimpleName() + "_region";
   private static int numOfCreates = 0;
   private static int numOfUpdates = 0;
   private static int numOfInvalidates = 0;
   private static boolean lastKeyReceived = false;
 
-  public Bug48571DUnitTest(String name) {
-    super(name);
-  }
-
   @Override
   public final void postSetUp() throws Exception {
     final Host host = Host.getHost(0);
@@ -99,13 +108,13 @@ public class Bug48571DUnitTest extends DistributedTestCase {
       }
       @Override
       public String description() {
-        // TODO Auto-generated method stub
         return "Proxy has not paused yet";
       }
     };
     Wait.waitForCriterion(criterion, 15 * 1000, 200, true);
   }
   
+  @Test
   public void testStatsMatchWithSize() throws Exception {
     IgnoredException.addIgnoredException("Unexpected IOException||Connection reset");
     // start a server
@@ -128,7 +137,6 @@ public class Bug48571DUnitTest extends DistributedTestCase {
     server.invoke(() -> Bug48571DUnitTest.verifyStats());
   }
 
-
   public static int createServerCache() throws Exception {
     Properties props = new Properties();
     props.setProperty(LOCATORS, "localhost[" + DistributedTestUtils.getDUnitLocatorPort() + "]");
@@ -139,7 +147,7 @@ public class Bug48571DUnitTest extends DistributedTestCase {
     props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");
     CacheFactory cf = new CacheFactory(props);
 
-    DistributedSystem ds = new Bug48571DUnitTest("Bug48571DUnitTest").getSystem(props);
+    DistributedSystem ds = new Bug48571DUnitTest().getSystem(props);
     ds.disconnect();
 
     cache = (GemFireCacheImpl)cf.create();
@@ -178,7 +186,7 @@ public class Bug48571DUnitTest extends DistributedTestCase {
     ccf.setPoolSubscriptionRedundancy(0);
     ccf.addPoolServer(host.getHostName(), port);
 
-    DistributedSystem ds = new Bug48571DUnitTest("Bug48571DUnitTest").getSystem(props);
+    DistributedSystem ds = new Bug48571DUnitTest().getSystem(props);
     ds.disconnect();
 
     cache = (GemFireCacheImpl) ccf.create();
@@ -267,7 +275,6 @@ public class Bug48571DUnitTest extends DistributedTestCase {
     Wait.waitForCriterion(wc, 60*1000, 500, true);
   }
 
-
   public static void verifyStats() throws Exception {
     CacheClientNotifier ccn = CacheClientNotifier.getInstance();
     CacheClientProxy ccp = ccn.getClientProxies().iterator().next();
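Bug48571DUnitTest (and several other tests in this commit) polls for conditions through the dunit WaitCriterion/Wait pair rather than sleeping. A minimal sketch of that usage; based on the calls above, the arguments after the criterion are read here as timeout in ms, polling interval in ms, and whether to fail on timeout (an inference from the diff, not quoted documentation):

import java.util.Queue;

import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;

public class WaitCriterionExample { // hypothetical holder class

  static void waitForQueueToDrain(final Queue<?> queue) {
    WaitCriterion criterion = new WaitCriterion() {
      @Override
      public boolean done() {
        return queue.isEmpty(); // polled until true or the timeout elapses
      }
      @Override
      public String description() {
        return "queue still has " + queue.size() + " entries"; // used in the timeout failure message
      }
    };
    Wait.waitForCriterion(criterion, 15 * 1000, 200, true);
  }
}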

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48879DUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48879DUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48879DUnitTest.java
index c26cc59..2e5feec 100644
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48879DUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48879DUnitTest.java
@@ -17,6 +17,12 @@
 package com.gemstone.gemfire.internal.cache.ha;
 
 import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import com.gemstone.gemfire.cache.CacheFactory;
 import com.gemstone.gemfire.cache.Region;
@@ -33,14 +39,14 @@ import com.gemstone.gemfire.internal.OSProcess;
 import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
 import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientNotifier;
 import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
-import com.gemstone.gemfire.test.dunit.DistributedTestCase;
 import com.gemstone.gemfire.test.dunit.Host;
 import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
-import java.util.Properties;
-
+@Category(DistributedTest.class)
 @SuppressWarnings("serial")
-public class Bug48879DUnitTest extends DistributedTestCase {
+public class Bug48879DUnitTest extends JUnit4DistributedTestCase {
 
   private static VM vm0 = null;
   private static VM vm1 = null;
@@ -51,8 +57,8 @@ public class Bug48879DUnitTest extends DistributedTestCase {
 
   public static final int SLEEP_TIME = 40000;
 
-  public Bug48879DUnitTest(String name) {
-    super(name);
+  public Bug48879DUnitTest() {
+    super();
   }
 
   @Override
@@ -85,9 +91,8 @@ public class Bug48879DUnitTest extends DistributedTestCase {
   }
 
   @SuppressWarnings({ "unused", "deprecation" })
-  public static Integer createCacheServer()
-      throws Exception {
-    Bug48879DUnitTest test = new Bug48879DUnitTest("Bug48879DUnitTest");
+  public static Integer createCacheServer() throws Exception {
+    Bug48879DUnitTest test = new Bug48879DUnitTest();
     System.setProperty(DistributionConfig.GEMFIRE_PREFIX + "MessageTimeToLive", "30");
     cache = (GemFireCacheImpl)CacheFactory.create(test.getSystem());
     HARegionQueue.threadIdExpiryTime = (SLEEP_TIME/1000) - 10;
@@ -113,7 +118,7 @@ public class Bug48879DUnitTest extends DistributedTestCase {
         + ".gfs");
     props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");
 
-    DistributedSystem ds = new Bug48879DUnitTest("Bug48879DUnitTest").getSystem(props);
+    DistributedSystem ds = new Bug48879DUnitTest().getSystem(props);
     ds.disconnect();
     ClientCacheFactory ccf = new ClientCacheFactory(props);
     ccf.setPoolSubscriptionEnabled(doRI);
@@ -201,6 +206,7 @@ public class Bug48879DUnitTest extends DistributedTestCase {
         actualTids >= expectedTids);
   }
 
+  @Test
   public void testThreadIdentfiersExpiry() throws Exception {
     // create server1 and server2
     // create client with redundancy = 1
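Bug48879DUnitTest shows the non-cache variant of the same conversion: DistributedTestCase becomes JUnit4DistributedTestCase, and the old (String name) constructor shrinks to an explicit no-arg constructor (which could equally be omitted, as most of the other converted tests do). A minimal sketch with a hypothetical class name:

import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
import com.gemstone.gemfire.test.junit.categories.DistributedTest;

@Category(DistributedTest.class)
public class ExampleDistributedDUnitTest extends JUnit4DistributedTestCase { // hypothetical name

  // all that remains of: public ExampleDistributedDUnitTest(String name) { super(name); }
  public ExampleDistributedDUnitTest() {
    super();
  }

  @Test
  public void testSomething() throws Exception {
    // ...
  }
}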

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
index 00c488a..9eb0982 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
@@ -16,6 +16,27 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
+import static org.junit.Assert.*;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.AttributesMutator;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.MirrorType;
+import com.gemstone.gemfire.cache.Operation;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.RegionDestroyedException;
+import com.gemstone.gemfire.cache.RegionEvent;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache.*;
 import com.gemstone.gemfire.cache.client.PoolManager;
 import com.gemstone.gemfire.cache.client.internal.Connection;
@@ -29,6 +50,12 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
 import com.gemstone.gemfire.internal.AvailablePort;
 import com.gemstone.gemfire.internal.cache.EventID;
 import com.gemstone.gemfire.internal.cache.EventIDHolder;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 import com.gemstone.gemfire.test.dunit.*;
 
 import java.util.Iterator;
@@ -49,11 +76,9 @@ import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties
  * sent to server-1 and then to server-2 via p2p and then finally to client-2.
  * It is verified that client-2 recieves the same values for thread-id and
  * sequence-id.
- * 
- * 
  */
-public class EventIdOptimizationDUnitTest extends DistributedTestCase
-{
+@Category(DistributedTest.class)
+public class EventIdOptimizationDUnitTest extends JUnit4DistributedTestCase {
 
   /** Cache-server1 */
   VM server1 = null;
@@ -135,17 +160,6 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
       new EventID(new byte[] { 1, 1 }, ID_VALUE_LONG, ID_VALUE_INT),
       new EventID(new byte[] { 1, 1 }, ID_VALUE_LONG, ID_VALUE_LONG) };
 
-  /** Constructor */
-  public EventIdOptimizationDUnitTest(String name) {
-    super(name);
-  }
-
-  /**
-   * Sets up the cache-servers and clients for the test
-   * 
-   * @throws Exception -
-   *           thrown in any problem occurs in setUp
-   */
   @Override
   public final void postSetUp() throws Exception  {
     disconnectAllFromDS();
@@ -181,7 +195,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
   /** Creates cache and starts the bridge-server */
   public static Integer createServerCache() throws Exception
   {
-    new EventIdOptimizationDUnitTest("temp").createCache(new Properties());
+    new EventIdOptimizationDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setMirrorType(MirrorType.KEYS_VALUES);
@@ -215,7 +229,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new EventIdOptimizationDUnitTest("temp").createCache(props);
+    new EventIdOptimizationDUnitTest().createCache(props);
 
     AttributesFactory factory = new AttributesFactory();
     ClientServerTestCase.configureConnectionPool(factory, hostName, port.intValue(),-1, true, -1, 2, null);
@@ -238,7 +252,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new EventIdOptimizationDUnitTest("temp").createCache(props);
+    new EventIdOptimizationDUnitTest().createCache(props);
     AttributesFactory factory = new AttributesFactory();
     ClientServerTestCase.configureConnectionPool(factory, hostName, port.intValue(),-1, true, -1, 2, null);
     
@@ -368,6 +382,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
    * @throws Exception -
    *           thrown if any exception occurs in test
    */
+  @Test
   public void testEventIdOptimizationByPutOperation() throws Exception
   {
     client1.invoke(() -> EventIdOptimizationDUnitTest.generateEventsByPutOperation());
@@ -383,6 +398,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
    * @throws Exception -
    *           thrown if any exception occurs in test
    */
+  @Test
   public void testEventIdOptimizationByDestroyEntryOperation() throws Exception
   {
     client1.invoke(() -> EventIdOptimizationDUnitTest.generateEventsByDestroyEntryOperation());
@@ -397,6 +413,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
    * @throws Exception -
    *           thrown if any exception occurs in test
    */
+  @Test
   public void testEventIdOptimizationByDestroyRegionOperation()
       throws Exception
   {
@@ -412,6 +429,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
    * @throws Exception -
    *           thrown if any exception occurs in test
    */
+  @Test
   public void testEventIdOptimizationByClearRegionOperation() throws Exception
   {
     client1.invoke(() -> EventIdOptimizationDUnitTest.generateEventsByClearRegionOperation());
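A smaller cleanup repeated above is the single static wildcard import of DistributedSystemConfigProperties in place of importing MCAST_PORT and LOCATORS individually; the constants are then used to connect a standalone ("loner") distributed system for the test. A minimal sketch of that setup, following the pattern in these tests (class name hypothetical):

import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;

import java.util.Properties;

import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.distributed.DistributedSystem;

public class LonerCacheExample { // hypothetical name

  static Cache createLonerCache() {
    Properties props = new Properties();
    props.setProperty(MCAST_PORT, "0"); // disable multicast discovery
    props.setProperty(LOCATORS, "");    // no locators: a standalone member for the test
    return CacheFactory.create(DistributedSystem.connect(props));
  }
}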

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
index 300ad4b..9e59ba7 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
@@ -16,7 +16,23 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
-import com.gemstone.gemfire.cache.*;
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static com.gemstone.gemfire.test.dunit.Assert.*;
+
+import java.util.Iterator;
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache.client.PoolManager;
 import com.gemstone.gemfire.cache.client.internal.PoolImpl;
 import com.gemstone.gemfire.cache.server.CacheServer;
@@ -28,24 +44,23 @@ import com.gemstone.gemfire.internal.cache.ClientServerObserverAdapter;
 import com.gemstone.gemfire.internal.cache.ClientServerObserverHolder;
 import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
 import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
-import com.gemstone.gemfire.test.dunit.*;
-
-import java.util.Iterator;
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
+import com.gemstone.gemfire.test.dunit.Assert;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.Wait;
+import com.gemstone.gemfire.test.dunit.WaitCriterion;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
 /**
- *
- *  Dunit test to verify HA feature. Have 2 nodes S1 & S2. Client is connected to S1 & S2 with S1 as the primary end point.
- *  Do some puts on S1 .The expiry is on high side. Stop S1 , the client is failing to S2.During fail over duration do some
- *  puts on S1. The client on failing to S2 may receive duplicate events but should not miss any events.
- *
- *
+ * Dunit test to verify the HA feature. There are two nodes, S1 and S2. The client is connected to S1 and S2 with S1 as the primary endpoint.
+ * Do some puts on S1; the expiry is on the high side. Stop S1 so that the client fails over to S2. During the failover window do some
+ * puts on S1. The client, on failing over to S2, may receive duplicate events but should not miss any events.
  */
-public class FailoverDUnitTest extends DistributedTestCase
-{
+@Category(DistributedTest.class)
+public class FailoverDUnitTest extends JUnit4DistributedTestCase {
+
   protected static Cache cache = null;
   //server
   private static VM vm0 = null;
@@ -57,11 +72,6 @@ public class FailoverDUnitTest extends DistributedTestCase
 
   private static final String regionName = "interestRegion";
 
-  /** constructor */
-  public FailoverDUnitTest(String name) {
-    super(name);
-  }
-
   @Override
   public final void postSetUp() throws Exception {
     final Host host = Host.getHost(0);
@@ -88,6 +98,7 @@ public class FailoverDUnitTest extends DistributedTestCase
     }
   }
 
+  @Test
   public void testFailover()
   {
     createEntries();
@@ -117,19 +128,8 @@ public class FailoverDUnitTest extends DistributedTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new FailoverDUnitTest("temp").createCache(props);
-
-    /*props.setProperty("retryAttempts", "5");
-    props.setProperty("endpoints", "ep1=" + hostName + ":"+PORT1+",ep2="
-        + hostName + ":"+PORT2);
-    props.setProperty("redundancyLevel", "-1");
-    props.setProperty("establishCallbackConnection", "true");
-    props.setProperty("LBPolicy", "RoundRobin");
-    props.setProperty("readTimeout", "250");
-    props.setProperty("socketBufferSize", "32768");
-    props.setProperty("retryInterval", "1000");
-    props.setProperty("connectionsPerServer", "2");
-*/
+    new FailoverDUnitTest().createCache(props);
+
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     ClientServerTestCase.configureConnectionPoolWithName(factory, hostName, new int[] {PORT1,PORT2}, true, -1, 2, null, "FailoverPool");
@@ -147,7 +147,7 @@ public class FailoverDUnitTest extends DistributedTestCase
 
   public static Integer createServerCache() throws Exception
   {
-    new FailoverDUnitTest("temp").createCache(new Properties());
+    new FailoverDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setDataPolicy(DataPolicy.REPLICATE);
@@ -217,7 +217,6 @@ public class FailoverDUnitTest extends DistributedTestCase
     }
   }
 
-
   public static void stopServer()
   {
     try {
@@ -228,7 +227,7 @@ public class FailoverDUnitTest extends DistributedTestCase
       }
     }
     catch (Exception e) {
-      fail("failed while stopServer()" + e);
+      fail("failed while stopServer()", e);
     }
   }
 
@@ -275,7 +274,7 @@ public class FailoverDUnitTest extends DistributedTestCase
           PoolImpl.BEFORE_PRIMARY_IDENTIFICATION_FROM_BACKUP_CALLBACK_FLAG = false;
         }
     });
-}
+  }
 
   public static void putDuringFailover()
   {
@@ -308,7 +307,6 @@ public class FailoverDUnitTest extends DistributedTestCase
     assertEquals("value-4", r.getEntry("key-4").getValue());
   }
 
-
   @Override
   public final void preTearDown() throws Exception {
     // close the clients first
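FailoverDUnitTest also switches its static assertion import to com.gemstone.gemfire.test.dunit.Assert, whose fail(String, Throwable) overload (used in the stopServer() change above) keeps the caught exception as the failure's cause instead of flattening it into the message string. A minimal sketch of the difference; the method and class names are placeholders:

import static com.gemstone.gemfire.test.dunit.Assert.fail;

public class FailureReportingExample { // hypothetical name

  static void stopServerSafely(Runnable stopAction) {
    try {
      stopAction.run();
    } catch (Exception e) {
      // old style: fail("failed while stopServer()" + e);  -- the stack trace is lost
      // new style: the throwable travels with the failure as its cause
      fail("failed while stopServer()", e);
    }
  }
}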

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
index ecce913..c16d7a0 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
@@ -16,52 +16,53 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
-import com.gemstone.gemfire.cache.*;
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheException;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
 import com.gemstone.gemfire.cache30.ClientServerTestCase;
 import com.gemstone.gemfire.distributed.DistributedSystem;
 import com.gemstone.gemfire.internal.AvailablePort;
 import com.gemstone.gemfire.internal.cache.CacheServerImpl;
-import com.gemstone.gemfire.test.dunit.DistributedTestCase;
 import com.gemstone.gemfire.test.dunit.Host;
 import com.gemstone.gemfire.test.dunit.NetworkUtils;
 import com.gemstone.gemfire.test.dunit.VM;
-
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
 /**
  * This Dunit test is to verify the bug in put() operation. When the put is invoked on the server
  * and NotifyBySubscription is false then it follows normal path and then again calls put of region
- * on which regionqueue is based. so recurssion is happening.
- *
+ * on which the region queue is based, so recursion is happening.
  */
+@Category(DistributedTest.class)
+public class HABugInPutDUnitTest extends JUnit4DistributedTestCase {
 
-public class HABugInPutDUnitTest extends DistributedTestCase
-{
-
-  VM server1 = null;
+  private static final String REGION_NAME = HABugInPutDUnitTest.class.getSimpleName() + "_region";
 
-  VM server2 = null;
-
-  VM client1 = null;
-
-  VM client2 = null;
-
-  private static final String REGION_NAME = "HABugInPutDUnitTest_region";
+  private VM server1 = null;
+  private VM server2 = null;
+  private VM client1 = null;
+  private VM client2 = null;
 
   final static String KEY1 = "KEY1";
-
   final static String VALUE1 = "VALUE1";
 
   protected static Cache cache = null;
 
-  public HABugInPutDUnitTest(String name) {
-    super(name);
-  }
-
   @Override
   public final void postSetUp() throws Exception {
 	  final Host host = Host.getHost(0);
@@ -118,7 +119,7 @@ public class HABugInPutDUnitTest extends DistributedTestCase
 
   public static Integer createServerCache() throws Exception
   {
-    new HABugInPutDUnitTest("temp").createCache(new Properties());
+    new HABugInPutDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setDataPolicy(DataPolicy.REPLICATE);
@@ -141,7 +142,7 @@ public class HABugInPutDUnitTest extends DistributedTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new HABugInPutDUnitTest("temp").createCache(props);
+    new HABugInPutDUnitTest().createCache(props);
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     ClientServerTestCase.configureConnectionPool(factory, hostName, new int[] {PORT1,PORT2}, true, -1, 2, null);
@@ -150,9 +151,9 @@ public class HABugInPutDUnitTest extends DistributedTestCase
     Region region = cache.getRegion(Region.SEPARATOR + REGION_NAME);
     assertNotNull(region);
     region.registerInterest(KEY1);
-
   }
 
+  @Test
   public void testBugInPut() throws Exception
   {
     client1.invoke(new CacheSerializableRunnable("putFromClient1") {

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
index 2235ca2..4c42a36 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
@@ -16,7 +16,24 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
-import com.gemstone.gemfire.cache.*;
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheException;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.InterestResultPolicy;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.RegionEvent;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
 import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
 import com.gemstone.gemfire.cache30.ClientServerTestCase;
@@ -27,22 +44,19 @@ import com.gemstone.gemfire.internal.cache.CacheObserverHolder;
 import com.gemstone.gemfire.internal.cache.CacheServerImpl;
 import com.gemstone.gemfire.internal.cache.LocalRegion;
 import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
-import com.gemstone.gemfire.test.dunit.*;
-
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
 /**
  * This is the Dunit test to verify clear and destroyRegion operation in
  * Client-Server configuration.
- *
- *
  */
-
-public class HAClearDUnitTest extends DistributedTestCase
-{
+@Category(DistributedTest.class)
+public class HAClearDUnitTest extends JUnit4DistributedTestCase {
 
   static VM server1 = null;
 
@@ -70,10 +84,6 @@ public class HAClearDUnitTest extends DistributedTestCase
 
   static boolean gotDestroyRegionCallback = false;
 
-  public HAClearDUnitTest(String name) {
-    super(name);
-  }
-
   @Override
   public final void postSetUp() throws Exception {
     final Host host = Host.getHost(0);
@@ -100,12 +110,14 @@ public class HAClearDUnitTest extends DistributedTestCase
     closeCache();
   }
 
-  /* The test perorms following operations
+  /**
+   * The test performs the following operations:
    * 1. Create 2 servers and 3 clients
    * 2. Perform put operations for a known set of keys directly from client1.
    * 3. Perform clear operation from client1
    * 4. Verify the result of the operation for other clients and other servers.
    */
+  @Test
   public void testClearWithOperationFromClient() throws Exception
   {
     createClientServerConfigurationForClearTest();
@@ -171,12 +183,14 @@ public class HAClearDUnitTest extends DistributedTestCase
     server2.invoke(checkSizeRegion(regionSize));
   }
 
-  /* The test perorms following operations
+  /**
+   * The test performs the following operations:
    * 1. Create 2 servers and 3 clients
    * 2. Perform put operations for a known set of keys directly from server1.
    * 3. Perform clear operation from server1
    * 4. Verify the result of the operation for other clients and other servers.
    */
+  @Test
   public void testClearWithOperationFromServer() throws Exception
   {
     createClientServerConfigurationForClearTest();
@@ -260,12 +274,14 @@ public class HAClearDUnitTest extends DistributedTestCase
   }
 
 
-  /* The test perorms following operations
+  /**
+   * The test performs the following operations:
    * 1. Create 2 servers and 3 clients
    * 2. Perform put operations for a known set of keys directly from client1.
    * 3. Perform destroyRegion operation from client1
    * 4. Verify the result of the operation for other clients and other servers.
    */
+  @Test
   public void testDestroyRegionWithOperationFromClient() throws Exception
   {
     createClientServerConfigurationForClearTest();
@@ -334,12 +350,14 @@ public class HAClearDUnitTest extends DistributedTestCase
   }
 
 
-  /* The test perorms following operations
+  /**
+   * The test performs the following operations:
    * 1. Create 2 servers and 3 clients
    * 2. Perform put operations for a known set of keys directly from server1.
    * 3. Perform destroyRegion operation from server1
    * 4. Verify the result of the operation for other clients and other servers.
    */
+  @Test
   public void testDestroyRegionWithOperationFromServer() throws Exception
   {
     createClientServerConfigurationForClearTest();
@@ -556,7 +574,7 @@ public class HAClearDUnitTest extends DistributedTestCase
   public static Integer createServerCache()
       throws Exception
   {
-    new HAClearDUnitTest("temp").createCache(new Properties());
+    new HAClearDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setDataPolicy(DataPolicy.REPLICATE);
@@ -590,7 +608,7 @@ public class HAClearDUnitTest extends DistributedTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new HAClearDUnitTest("temp").createCache(props);
+    new HAClearDUnitTest().createCache(props);
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     ClientServerTestCase.configureConnectionPool(factory, hostName, new int[] {PORT1,PORT2}, true, -1, 2, null);

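The import hunk above also swaps the per-constant static imports for a wildcard static import of DistributedSystemConfigProperties. A minimal sketch of the loner-system properties the converted createClientCache methods build from those constants (the helper class is illustrative only; the real tests build these inline):

    import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;

    import java.util.Properties;

    // Illustrative helper showing the property constants used by the converted tests.
    class ClientPropertiesSketch {
      static Properties lonerClientProperties() {
        Properties props = new Properties();
        props.setProperty(MCAST_PORT, "0"); // no multicast discovery
        props.setProperty(LOCATORS, "");    // no locators, so the client VM connects as a loner system
        return props;
      }
    }
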
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
index 33b84d9..dca557c 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
@@ -16,20 +16,37 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
-import com.gemstone.gemfire.cache.*;
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.Properties;
+
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.CacheException;
+import com.gemstone.gemfire.cache.CacheListener;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.Declarable;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.RegionEvent;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
-import com.gemstone.gemfire.cache30.CacheTestCase;
 import com.gemstone.gemfire.cache30.ClientServerTestCase;
 import com.gemstone.gemfire.distributed.DistributedSystem;
 import com.gemstone.gemfire.internal.AvailablePort;
 import com.gemstone.gemfire.internal.cache.CacheServerImpl;
 import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
-import com.gemstone.gemfire.test.dunit.*;
-
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.Wait;
+import com.gemstone.gemfire.test.dunit.WaitCriterion;
+import com.gemstone.gemfire.test.dunit.cache.internal.JUnit4CacheTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
 /**
  * This is a targeted conflation Dunit test.
@@ -42,12 +59,9 @@ import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties
  *  3) Do create, then update, update, invalidate. The client should receive 3 callbacks: one for the create, one for the last update,
  *     and one for the invalidate.
  *  4) Do a create, update, update & destroy. The client should receive 3 callbacks (create, conflated update & destroy).
- *
- *
  */
-
-public class HAConflationDUnitTest extends CacheTestCase
-{
+@Category(DistributedTest.class)
+public class HAConflationDUnitTest extends JUnit4CacheTestCase {
 
   VM server1 = null;
 
@@ -55,19 +69,19 @@ public class HAConflationDUnitTest extends CacheTestCase
 
   private static final String regionName = "HAConflationDUnitTest_region";
 
-  final static String KEY1 = "KEY1";
+  static final String KEY1 = "KEY1";
 
-  final static String KEY2 = "KEY2";
+  static final String KEY2 = "KEY2";
 
-  final static String KEY3 = "KEY3";
+  static final String KEY3 = "KEY3";
 
-  final static String VALUE1 = "VALUE1";
+  static final String VALUE1 = "VALUE1";
 
-  final static String VALUE2 = "VALUE2";
+  static final String VALUE2 = "VALUE2";
 
-  final static String VALUE3 = "VALUE3";
+  static final String VALUE3 = "VALUE3";
 
-  final static String LAST_KEY = "lastkey";
+  static final String LAST_KEY = "lastkey";
 
   static final String LAST_VALUE = "lastvalue";
 
@@ -79,8 +93,8 @@ public class HAConflationDUnitTest extends CacheTestCase
 
   static int actualNoEvents = 0;
 
-  public HAConflationDUnitTest(String name) {
-    super(name);
+  public HAConflationDUnitTest() {
+    super();
   }
 
   @Override
@@ -119,6 +133,7 @@ public class HAConflationDUnitTest extends CacheTestCase
    * @throws Exception
    */
 
+  @Test
   public void testConflationCreateUpdate() throws Exception
   {
     server1.invoke(putFromServer(KEY1, VALUE1));
@@ -134,6 +149,7 @@ public class HAConflationDUnitTest extends CacheTestCase
    * The client should receive 2 callbacks, one for the create & one for the last update.
    * @throws Exception
    */
+  @Test
   public void testConflationUpdate() throws Exception
   {
 
@@ -158,6 +174,7 @@ public class HAConflationDUnitTest extends CacheTestCase
    * and one for the invalidate.
    * @throws Exception
    */
+  @Test
   public void testConflationCreateUpdateInvalidate() throws Exception
   {
 
@@ -176,6 +193,7 @@ public class HAConflationDUnitTest extends CacheTestCase
    * The client should receive 3 callbacks (create, conflated update & destroy).
    * @throws Exception
    */
+  @Test
   public void testConflationCreateUpdateDestroy() throws Exception
   {
 
@@ -297,7 +315,7 @@ public class HAConflationDUnitTest extends CacheTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new HAConflationDUnitTest("temp").createCache(props);
+    new HAConflationDUnitTest().createCache(props);
     AttributesFactory factory = new AttributesFactory();
     ClientServerTestCase.configureConnectionPool(factory, host, new int[] { PORT1 }, true, -1, -1, null);
     factory.setScope(Scope.DISTRIBUTED_ACK);
@@ -323,7 +341,7 @@ public class HAConflationDUnitTest extends CacheTestCase
   public static Integer createServerCache(Boolean isListenerPresent)
       throws Exception
   {
-    new HAConflationDUnitTest("temp").createCache(new Properties());
+    new HAConflationDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setEnableConflation(true);

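For context on the conflation expectations listed in the class javadoc above, the assertions in these tests hinge on counting listener callbacks on the client. The sketch below is illustrative only (class and field names are not from the patch) and assumes the raw CacheListenerAdapter usage seen elsewhere in this commit:

    import com.gemstone.gemfire.cache.EntryEvent;
    import com.gemstone.gemfire.cache.util.CacheListenerAdapter;

    // Illustrative callback counter: with conflation enabled, a create followed by
    // several updates should reach the client as the create plus only the latest update,
    // so the count observed here stays smaller than the number of operations performed.
    class ConflationCountingListener extends CacheListenerAdapter {
      volatile int callbackCount = 0;

      @Override
      public void afterCreate(EntryEvent event) {
        callbackCount++;
      }

      @Override
      public void afterUpdate(EntryEvent event) {
        callbackCount++;
      }
    }
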
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
index b6100b3..54f18cd 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
@@ -16,21 +16,40 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
-import com.gemstone.gemfire.cache.*;
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheException;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.CacheListener;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.InterestResultPolicy;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
 import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
 import com.gemstone.gemfire.cache30.ClientServerTestCase;
 import com.gemstone.gemfire.distributed.DistributedSystem;
 import com.gemstone.gemfire.internal.AvailablePort;
 import com.gemstone.gemfire.internal.cache.CacheServerImpl;
-import com.gemstone.gemfire.test.dunit.*;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.IgnoredException;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
 /**
  * This is the Dunit test to verify the duplicates after the fail over
@@ -39,12 +58,9 @@ import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties
  * 2. Perform put operations for a known set of keys directly from server1.
  * 3. Stop server1 so that fail over happens
  * 4. Validate the duplicates received by client1
- *
- *
  */
-
-public class HADuplicateDUnitTest extends DistributedTestCase
-{
+@Category(DistributedTest.class)
+public class HADuplicateDUnitTest extends JUnit4DistributedTestCase {
 
   VM server1 = null;
 
@@ -72,10 +88,6 @@ public class HADuplicateDUnitTest extends DistributedTestCase
 
   static Map storeEvents = new HashMap();
 
-  public HADuplicateDUnitTest(String name) {
-    super(name);
-  }
-
   @Override
   public final void postSetUp() throws Exception {
     final Host host = Host.getHost(0);
@@ -98,8 +110,9 @@ public class HADuplicateDUnitTest extends DistributedTestCase
     server2.invoke(() -> HADuplicateDUnitTest.closeCache());
   }
 
-  public void _testDuplicate() throws Exception
-  {
+  @Ignore("TODO")
+  @Test
+  public void testDuplicate() throws Exception {
     createClientServerConfiguration();
     server1.invoke(putForKnownKeys());
     server1.invoke(stopServer());
@@ -138,10 +151,9 @@ public class HADuplicateDUnitTest extends DistributedTestCase
     server1.invoke(() -> HADuplicateDUnitTest.reSetQRMslow());
   }
 
-
+  @Test
   public void testSample() throws Exception
   {
-
     IgnoredException.addIgnoredException("IOException");
     IgnoredException.addIgnoredException("Connection reset");
     createClientServerConfiguration();
@@ -155,10 +167,8 @@ public class HADuplicateDUnitTest extends DistributedTestCase
 
     }
     });
-
   }
 
-
   // function to perform put operations for the known set of keys.
   private CacheSerializableRunnable putForKnownKeys()
   {
@@ -196,8 +206,6 @@ public class HADuplicateDUnitTest extends DistributedTestCase
     return stopserver;
   }
 
-
-
   // function to create 2servers and 1 clients
   private void createClientServerConfiguration()
   {
@@ -222,7 +230,7 @@ public class HADuplicateDUnitTest extends DistributedTestCase
 
   public static Integer createServerCache() throws Exception
   {
-    new HADuplicateDUnitTest("temp").createCache(new Properties());
+    new HADuplicateDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setDataPolicy(DataPolicy.REPLICATE);
@@ -255,7 +263,7 @@ public class HADuplicateDUnitTest extends DistributedTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new HADuplicateDUnitTest("temp").createCache(props);
+    new HADuplicateDUnitTest().createCache(props);
     AttributesFactory factory = new AttributesFactory();
     ClientServerTestCase.configureConnectionPool(factory, hostName, new int[] {PORT1,PORT2}, true, -1, 2, null);
     
@@ -278,9 +286,10 @@ public class HADuplicateDUnitTest extends DistributedTestCase
       cache.getDistributedSystem().disconnect();
     }
   }
-
 }
 
+// TODO: move these classes to be inner static classes
+
 // Listener class for the validation purpose
 class HAValidateDuplicateListener extends CacheListenerAdapter
 {
@@ -288,7 +297,6 @@ class HAValidateDuplicateListener extends CacheListenerAdapter
   {
     System.out.println("After Create");
     HADuplicateDUnitTest.storeEvents.put(event.getKey(), event.getNewValue());
-
   }
 
   public void afterUpdate(EntryEvent event)

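The _testDuplicate hunk above shows how this commit treats tests that had been disabled under JUnit3 by renaming them out of the test* convention: under JUnit4 the method keeps its name and is skipped explicitly. A minimal sketch of that pattern (method name illustrative):

    import org.junit.Ignore;
    import org.junit.Test;

    public class IgnoredTestSketch {

      // JUnit3: renaming to _testSomething silently removed the test from the run.
      // JUnit4: the method keeps its test* name, and @Ignore records that (and why)
      // it is skipped, so the runner still reports it instead of losing track of it.
      @Ignore("TODO")
      @Test
      public void testSomething() throws Exception {
        // original disabled test body
      }
    }
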
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/b914df23/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
index a5e9b41..d45e66e 100755
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
@@ -16,7 +16,30 @@
  */
 package com.gemstone.gemfire.internal.cache.ha;
 
-import com.gemstone.gemfire.cache.*;
+import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.*;
+import static com.gemstone.gemfire.test.dunit.Assert.*;
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheException;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.CacheListener;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.InterestResultPolicy;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.RegionEvent;
+import com.gemstone.gemfire.cache.Scope;
 import com.gemstone.gemfire.cache.client.PoolManager;
 import com.gemstone.gemfire.cache.client.internal.PoolImpl;
 import com.gemstone.gemfire.cache.client.internal.QueueStateImpl.SequenceIdAndExpirationObject;
@@ -30,25 +53,23 @@ import com.gemstone.gemfire.internal.cache.EntryEventImpl;
 import com.gemstone.gemfire.internal.cache.EventID;
 import com.gemstone.gemfire.internal.cache.RegionEventImpl;
 import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
-import com.gemstone.gemfire.test.dunit.*;
-
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.LOCATORS;
-import static com.gemstone.gemfire.distributed.DistributedSystemConfigProperties.MCAST_PORT;
+import com.gemstone.gemfire.test.dunit.Assert;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.Wait;
+import com.gemstone.gemfire.test.dunit.WaitCriterion;
+import com.gemstone.gemfire.test.dunit.internal.JUnit4DistributedTestCase;
+import com.gemstone.gemfire.test.junit.categories.DistributedTest;
 
 /**
- *
  * Test to verify correct propagation of EventID from server to client
  *
  * @since GemFire 5.1
  */
-
-public class HAEventIdPropagationDUnitTest extends DistributedTestCase
-{
+@Category(DistributedTest.class)
+public class HAEventIdPropagationDUnitTest extends JUnit4DistributedTestCase {
 
   /** server VM * */
   VM server1 = null;
@@ -57,7 +78,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
   VM client1 = null;
 
   /** region name* */
-  private static final String REGION_NAME = "HAEventIdPropagationDUnitTest_Region";
+  private static final String REGION_NAME = HAEventIdPropagationDUnitTest.class.getSimpleName() + "_Region";
 
   /** cache * */
   private static Cache cache = null;
@@ -65,11 +86,6 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
   /** server * */
   static CacheServerImpl server = null;
 
-  /** test constructor * */
-  public HAEventIdPropagationDUnitTest(String name) {
-    super(name);
-  }
-
   /** get the hosts and the VMs * */
   @Override
   public final void postSetUp() throws Exception {
@@ -114,7 +130,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
   /** create the server * */
   public static Integer createServerCache() throws Exception
   {
-    new HAEventIdPropagationDUnitTest("temp").createCache(new Properties());
+    new HAEventIdPropagationDUnitTest().createCache(new Properties());
     AttributesFactory factory = new AttributesFactory();
     factory.setScope(Scope.DISTRIBUTED_ACK);
     factory.setDataPolicy(DataPolicy.REPLICATE);
@@ -151,7 +167,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
     Properties props = new Properties();
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
-    new HAEventIdPropagationDUnitTest("temp").createCache(props);
+    new HAEventIdPropagationDUnitTest().createCache(props);
     AttributesFactory factory = new AttributesFactory();
     PoolImpl pi = (PoolImpl)ClientServerTestCase.configureConnectionPool(factory, hostName, new int[] {PORT1}, true, -1, 2, null);
     factory.setScope(Scope.DISTRIBUTED_ACK);
@@ -177,7 +193,6 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
     }
   }
 
-
   /**
    * function to assert that the ThreadIdToSequenceIdMap is not null but is
    * empty *
@@ -202,7 +217,6 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       LogWriterUtils.getLogWriter().info("assertThreadIdToSequenceIdMapisNotNullButEmpty: map size is " + map.size());
       assertTrue(map.size() == 1);
     }
-
   }
 
   /**
@@ -293,9 +307,8 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
    * server 5) asserts that the ThreadIdToSequenceIdMap is not null and has one
    * entry (on the client side) and returns the eventId stored in the map 6)
    * verifies the equality of the two event ids
-   *
-   * @throws Exception
    */
+  @Test
   public void testEventIDPropagation() throws Exception
   {
     try {
@@ -353,7 +366,6 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
         }
       }
 
-
       client1.invoke(() -> HAEventIdPropagationDUnitTest.setReceivedOperationToFalse());
       eventId1 = server1.invoke(() -> HAEventIdPropagationDUnitTest.removePUTALL_KEY1());
       assertNotNull(eventId1);
@@ -385,6 +397,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
   }
 
 
+  @Test
   public void testEventIDPropagationForClear() throws Exception
   {
     createClientServerConfiguration();
@@ -400,12 +413,11 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
     if (!eventId1.equals(eventId2)) {
       fail("Test failed as the clear eventIds are not equal");
     }
-
   }
 
-
-  public void _testEventIDPropagationForDestroyRegion() throws Exception
-  {
+  @Ignore("TODO: test is disabled but passes when run")
+  @Test
+  public void testEventIDPropagationForDestroyRegion() throws Exception {
     createClientServerConfiguration();
     client1.invoke(() -> HAEventIdPropagationDUnitTest.setReceivedOperationToFalse());
     Object eventId1 = server1.invoke(() -> HAEventIdPropagationDUnitTest.destroyRegion());
@@ -418,8 +430,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
     if (!eventId1.equals(eventId2)) {
       fail("Test failed as the eventIds are not equal");
     }
- }
-
+  }
 
   public static void setReceivedOperationToFalse()
   {
@@ -440,7 +451,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return eventId;
     }
     catch (Exception e) {
-      fail("put failed due to " + e);
+      fail("put failed due to ", e);
     }
     return null;
   }
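
The fail(...) change in the hunk above recurs through the rest of this file: the caught exception is now passed to the dunit Assert overload that is statically imported at the top of the file, instead of being concatenated into the message. A minimal sketch of the difference (helper class name illustrative):

    import static com.gemstone.gemfire.test.dunit.Assert.*;

    // Illustrative only: passing the Throwable keeps its stack trace attached to the
    // resulting assertion failure, whereas string concatenation kept just e.toString().
    class FailWithCauseSketch {
      static void handle(Exception e) {
        fail("put failed due to ", e); // was: fail("put failed due to " + e)
      }
    }
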
@@ -459,7 +470,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return eventId;
     }
     catch (Exception e) {
-      fail("put failed due to " + e);
+      fail("put failed due to ", e);
     }
     return null;
   }
@@ -496,7 +507,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return evids;
     }
     catch (Exception e) {
-      fail("put failed due to " + e);
+      fail("put failed due to ", e);
     }
     return null;
   }
@@ -515,7 +526,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return eventId;
     }
     catch (Exception e) {
-      fail("put failed due to " + e);
+      fail("put failed due to ", e);
     }
     return null;
   }
@@ -534,7 +545,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return eventId;
     }
     catch (Exception e) {
-      fail("put failed due to " + e);
+      fail("put failed due to ", e);
     }
     return null;
   }
@@ -554,7 +565,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return eventId;
     }
     catch (Exception e) {
-      fail("put failed due to " + e);
+      fail("put failed due to ", e);
     }
     return null;
   }
@@ -573,7 +584,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return eventId;
     }
     catch (Exception e) {
-      fail("clear failed due to " + e);
+      fail("clear failed due to ", e);
     }
     return null;
   }
@@ -595,7 +606,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       return eventId;
     }
     catch (Exception e) {
-      fail("Destroy failed due to " + e);
+      fail("Destroy failed due to ", e);
     }
     return null;
   }
@@ -618,7 +629,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
           lockObject.wait(10000);
         }
         catch (InterruptedException e) {
-          fail("interrupted");
+          fail("interrupted", e);
         }
       }
       if (!receivedOperation) {
@@ -631,14 +642,12 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
   /**
    * Listener which sends a notification after create to waiting threads and
    * also extracts the event id, storing it in a static variable
-   *
    */
-  static class HAEventIdPropagationListenerForClient extends
-      CacheListenerAdapter
-  {
+  static class HAEventIdPropagationListenerForClient extends CacheListenerAdapter {
 
     private int putAllReceivedCount = 0;
 
+    @Override
     public void afterCreate(EntryEvent event)
     {
       LogWriterUtils.getLogWriter().fine(" entered after created with " + event.getKey());
@@ -678,6 +687,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterUpdate(EntryEvent event)
     {
       synchronized (lockObject) {
@@ -686,6 +696,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterInvalidate(EntryEvent event)
     {
       synchronized (lockObject) {
@@ -694,6 +705,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterDestroy(EntryEvent event)
     {
       synchronized (lockObject) {
@@ -702,6 +714,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterRegionDestroy(RegionEvent event)
     {
       synchronized (lockObject) {
@@ -711,6 +724,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterRegionClear(RegionEvent event)
     {
       synchronized (lockObject) {
@@ -724,14 +738,12 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
   /**
    * Listener which sends a notification after create to waiting threads and
    * also extracts the event id, storing it in a static variable
-   *
    */
-  static class HAEventIdPropagationListenerForServer extends
-      CacheListenerAdapter
-  {
+  static class HAEventIdPropagationListenerForServer extends CacheListenerAdapter {
 
     private int putAllReceivedCount = 0;
 
+    @Override
     public void afterCreate(EntryEvent event)
     {
       LogWriterUtils.getLogWriter().fine(" entered after created with " + event.getKey());
@@ -778,6 +790,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterUpdate(EntryEvent event)
     {
       eventId = ((EntryEventImpl)event).getEventId();
@@ -788,6 +801,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterInvalidate(EntryEvent event)
     {
       eventId = ((EntryEventImpl)event).getEventId();
@@ -798,6 +812,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterDestroy(EntryEvent event)
     {
       eventId = ((EntryEventImpl)event).getEventId();
@@ -808,6 +823,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       }
     }
 
+    @Override
     public void afterRegionDestroy(RegionEvent event)
     {
       LogWriterUtils.getLogWriter().info("Before Regionestroy in Server");
@@ -820,6 +836,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
       LogWriterUtils.getLogWriter().info("After RegionDestroy in Server");
     }
 
+    @Override
     public void afterRegionClear(RegionEvent event)
     {
       eventId = ((RegionEventImpl)event).getEventId();