You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@geode.apache.org by kl...@apache.org on 2016/02/05 01:16:07 UTC
[01/33] incubator-geode git commit: Cleanup
Repository: incubator-geode
Updated Branches:
refs/heads/feature/GEODE-773-2 b17027b64 -> c05f6798d
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPersistenceEnabledGatewaySenderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPersistenceEnabledGatewaySenderDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPersistenceEnabledGatewaySenderDUnitTest.java
index c0f6dea..01f665c 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPersistenceEnabledGatewaySenderDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPersistenceEnabledGatewaySenderDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
WANTestBase {
@@ -145,14 +145,14 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
- LogWriterSupport.getLogWriter().info("Created remote receivers");
+ LogWriterUtils.getLogWriter().info("Created remote receivers");
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created local site cache");
+ LogWriterUtils.getLogWriter().info("Created local site cache");
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
true, 100, 10, false, true, null, true });
@@ -163,7 +163,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
true, 100, 10, false, true, null, true });
- LogWriterSupport.getLogWriter().info("Created local site senders");
+ LogWriterUtils.getLogWriter().info("Created local site senders");
vm4.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName(), "ln", 1, 100, isOffHeap() });
@@ -174,15 +174,15 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName(), "ln", 1, 100, isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created local site persistent PR");
+ LogWriterUtils.getLogWriter().info("Created local site persistent PR");
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started sender on vm4");
+ LogWriterUtils.getLogWriter().info("Started sender on vm4");
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the senders");
+ LogWriterUtils.getLogWriter().info("Started the senders");
vm2.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName(), null, 1, 100, isOffHeap() });
@@ -234,7 +234,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
@@ -272,7 +272,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -281,7 +281,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -289,7 +289,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -301,7 +301,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName(), "ln", 1, 100, isOffHeap() });
@@ -322,7 +322,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//start the senders in async mode. This will ensure that the
//node of shadow PR that went down last will come up first
@@ -331,14 +331,14 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -383,7 +383,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -421,7 +421,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -430,7 +430,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -438,7 +438,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -450,7 +450,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
getTestMethodName(), "ln", 1, 100, isOffHeap() });
@@ -471,7 +471,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//start the senders in async mode. This will ensure that the
//node of shadow PR that went down last will come up first
@@ -480,14 +480,14 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -528,7 +528,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, false });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on local site
vm4.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -560,7 +560,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 300 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -569,7 +569,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -577,7 +577,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -589,7 +589,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -611,7 +611,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
vm4.invoke(WANTestBase.class, "unsetRemoveFromQueueOnException", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "unsetRemoveFromQueueOnException", new Object[] { "ln" });
@@ -625,7 +625,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
@@ -633,13 +633,13 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Creating the receiver.");
+ LogWriterUtils.getLogWriter().info("Creating the receiver.");
//create receiver on remote site
vm2.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
//create PR on remote site
- LogWriterSupport.getLogWriter().info("Creating the partitioned region at receiver. ");
+ LogWriterUtils.getLogWriter().info("Creating the partitioned region at receiver. ");
vm2.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
getTestMethodName(), null, 1, 100, isOffHeap() });
vm3.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -649,7 +649,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Doing some extra puts. ");
+ LogWriterUtils.getLogWriter().info("Doing some extra puts. ");
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPutsAfter300", new Object[] { getTestMethodName(), 1000 });
//----------------------------------------------------------------------------------------------------
@@ -658,7 +658,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Validating the region size at the receiver end. ");
+ LogWriterUtils.getLogWriter().info("Validating the region size at the receiver end. ");
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
getTestMethodName(), 1000 });
vm3.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
@@ -702,7 +702,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -740,7 +740,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -749,7 +749,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -757,7 +757,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -769,7 +769,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -791,7 +791,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//start the senders in async mode. This will ensure that the
//node of shadow PR that went down last will come up first
@@ -800,14 +800,14 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -860,7 +860,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -898,7 +898,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPutsWithKeyAsString", new Object[] { getTestMethodName(), 1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -907,7 +907,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -915,7 +915,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -927,7 +927,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -949,7 +949,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//start the senders in async mode. This will ensure that the
//node of shadow PR that went down last will come up first
@@ -958,14 +958,14 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -1010,7 +1010,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -1048,7 +1048,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPutsWithKeyAsString", new Object[] { getTestMethodName(), 1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -1057,7 +1057,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1065,7 +1065,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -1077,7 +1077,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
// create PR on local site
vm4.invoke(WANTestBase.class, "createPersistentPartitionedRegion",
@@ -1089,7 +1089,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createPersistentPartitionedRegion",
new Object[] { getTestMethodName(), "ln", 1, 100, isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//start the senders in async mode. This will ensure that the
//node of shadow PR that went down last will come up first
@@ -1098,14 +1098,14 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -1125,7 +1125,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//do some extra puts in region on local site
vm4.invoke(WANTestBase.class, "doPutsWithKeyAsString", new Object[] { getTestMethodName(), 10000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
@@ -1158,7 +1158,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
// start puts in region on local site
vm4.invoke(WANTestBase.class, "doPutsWithKeyAsString", new Object[] {
getTestMethodName(), 1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
// --------------------close and rebuild local site
// -------------------------------------------------
@@ -1168,7 +1168,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
// restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1176,7 +1176,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
// // create PR on local site
// vm4.invoke(WANTestBase.class, "createPersistentPartitionedRegion",
@@ -1213,7 +1213,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
vm4.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
getTestMethodName(), 1000 });
@@ -1262,7 +1262,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -1300,7 +1300,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -1309,7 +1309,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1317,7 +1317,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders from disk store
@@ -1330,7 +1330,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//start the senders. NOTE that the senders are not associated with partitioned region
@@ -1339,16 +1339,16 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the senders.");
+ LogWriterUtils.getLogWriter().info("Started the senders.");
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -1392,7 +1392,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
@@ -1430,7 +1430,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//kill the senders
vm4.invoke(WANTestBase.class, "killSender", new Object[] {});
@@ -1438,7 +1438,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders. The local site has been brought down.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders. The local site has been brought down.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1446,7 +1446,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -1458,7 +1458,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
vm4.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
@@ -1470,7 +1470,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName(), "ln", 1, 100, isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//start the senders
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
@@ -1478,16 +1478,16 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the senders.");
+ LogWriterUtils.getLogWriter().info("Started the senders.");
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
getTestMethodName(), 1000 });
@@ -1569,7 +1569,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//----------------- Close and rebuild local site -------------------------------------
//kill the senders
@@ -1578,7 +1578,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1586,7 +1586,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create back the senders
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
@@ -1598,7 +1598,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm7.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
true, 100, 10, false, false, null, true });
- LogWriterSupport.getLogWriter().info("Created the senders again");
+ LogWriterUtils.getLogWriter().info("Created the senders again");
vm4.invoke(WANTestBase.class, "setRemoveFromQueueOnException", new Object[] { "ln", true });
vm5.invoke(WANTestBase.class, "setRemoveFromQueueOnException", new Object[] { "ln", true });
@@ -1611,9 +1611,9 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the senders.");
+ LogWriterUtils.getLogWriter().info("Started the senders.");
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
@@ -1621,7 +1621,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPersistentPartitionedRegion", new Object[] {
@@ -1643,13 +1643,13 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//-------------------------------------------------------------------------------------------
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName(), 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] { getTestMethodName(), 3000 });
vm3.invoke(WANTestBase.class, "validateRegionSize", new Object[] { getTestMethodName(), 3000 });
@@ -1711,7 +1711,7 @@ public class ParallelWANPersistenceEnabledGatewaySenderDUnitTest extends
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
getTestMethodName() + "_PR", 1000 });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPropagationDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPropagationDUnitTest.java
index 7ea17d6..530e101 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPropagationDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelWANPropagationDUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase.MyGatewayEventFilter;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
public class ParallelWANPropagationDUnitTest extends WANTestBase {
@@ -421,7 +421,7 @@ public class ParallelWANPropagationDUnitTest extends WANTestBase {
Integer regionSize =
(Integer) vm2.invoke(WANTestBase.class, "getRegionSize", new Object[] {getTestMethodName() + "_PR" });
- LogWriterSupport.getLogWriter().info("Region size on remote is: " + regionSize);
+ LogWriterUtils.getLogWriter().info("Region size on remote is: " + regionSize);
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialGatewaySenderOperationsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialGatewaySenderOperationsDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialGatewaySenderOperationsDUnitTest.java
index 90ccd1b..b306c22 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialGatewaySenderOperationsDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialGatewaySenderOperationsDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.internal.cache.wan.GatewaySenderException;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.RMIException;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -175,7 +175,7 @@ public class SerialGatewaySenderOperationsDUnitTest extends WANTestBase {
fail("Interrupted the async invocation.");
}
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
validateQueueContents(vm4, "ln", 0);
validateQueueContents(vm5, "ln", 0);
@@ -315,7 +315,7 @@ public class SerialGatewaySenderOperationsDUnitTest extends WANTestBase {
vm4.invoke(() -> WANTestBase.startSender( "ln" ));
asyncPuts.getResult();
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
vm2.invoke(() -> WANTestBase.validateRegionSize(
getTestMethodName() + "_RR", 300 ));
@@ -354,7 +354,7 @@ public class SerialGatewaySenderOperationsDUnitTest extends WANTestBase {
vm4.invoke(() -> WANTestBase.doPuts( getTestMethodName() + "_RR",
100 ));
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
vm2.invoke(() -> WANTestBase.validateRegionSize(
getTestMethodName() + "_RR", 100 ));
@@ -389,7 +389,7 @@ public class SerialGatewaySenderOperationsDUnitTest extends WANTestBase {
vm5.invoke(() -> WANTestBase.doPuts( getTestMethodName() + "_RR",
100 ));
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
vm2.invoke(() -> WANTestBase.validateRegionSize(
getTestMethodName() + "_RR", 100 ));
}
@@ -432,7 +432,7 @@ public class SerialGatewaySenderOperationsDUnitTest extends WANTestBase {
vm2.invoke(() -> WANTestBase.createReceiver( nyPort ));
vm2.invoke(() -> WANTestBase.createReplicatedRegion(
getTestMethodName() + "_RR", null, isOffHeap() ));
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
vm2.invoke(() -> WANTestBase.validateRegionSize(
getTestMethodName() + "_RR", 100 ));
vm5.invoke(() -> WANTestBase.stopSender( "ln" ));
@@ -582,7 +582,7 @@ public class SerialGatewaySenderOperationsDUnitTest extends WANTestBase {
public void run() {
InternalLocator inl = (InternalLocator)Locator.getLocator();
ServerLocator servel = inl.getServerLocatorAdvisee();
- LogWriterSupport.getLogWriter().info("Server load map is " + servel.getLoadMap());
+ LogWriterUtils.getLogWriter().info("Server load map is " + servel.getLoadMap());
assertTrue("expected an empty map but found " + servel.getLoadMap(),
servel.getLoadMap().isEmpty());
QueueConnectionRequest request = new QueueConnectionRequest(
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPersistenceEnabledGatewaySenderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPersistenceEnabledGatewaySenderDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPersistenceEnabledGatewaySenderDUnitTest.java
index 6cc2d99..bd60ad6 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPersistenceEnabledGatewaySenderDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPersistenceEnabledGatewaySenderDUnitTest.java
@@ -19,7 +19,7 @@ package com.gemstone.gemfire.internal.cache.wan.serial;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
/**
@@ -217,8 +217,8 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
"createSenderWithDiskStore", new Object[] { "ln", 2, false,
100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The first ds is " + firstDStore);
- LogWriterSupport.getLogWriter().info("The first ds is " + secondDStore);
+ LogWriterUtils.getLogWriter().info("The first ds is " + firstDStore);
+ LogWriterUtils.getLogWriter().info("The first ds is " + secondDStore);
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", null, isOffHeap() });
@@ -243,7 +243,7 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_RR",
1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
// verify if the queue has all the events
// vm4.invoke(WANTestBase.class, "checkQueueSize", new Object[] { "ln", 1000
@@ -262,24 +262,24 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the sender. ");
+ LogWriterUtils.getLogWriter().info("Killed all the sender. ");
// restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore", new Object[] {
"ln", 2, false, 100, 10, false, true, null,
firstDStore, true });
- LogWriterSupport.getLogWriter().info("Creted the sender.... in vm4 ");
+ LogWriterUtils.getLogWriter().info("Creted the sender.... in vm4 ");
vm5.invoke(WANTestBase.class, "createSenderWithDiskStore", new Object[] {
"ln", 2, false, 100, 10, false, true, null,
secondDStore, true });
- LogWriterSupport.getLogWriter().info("Creted the sender.... in vm5 ");
+ LogWriterUtils.getLogWriter().info("Creted the sender.... in vm5 ");
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "startSender",
new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 4");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 4");
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 5");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 5");
try {
inv1.join();
} catch (InterruptedException e) {
@@ -322,8 +322,8 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
"createSenderWithDiskStore", new Object[] { "ln", 2, false,
100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The first ds is " + firstDStore);
- LogWriterSupport.getLogWriter().info("The first ds is " + secondDStore);
+ LogWriterUtils.getLogWriter().info("The first ds is " + firstDStore);
+ LogWriterUtils.getLogWriter().info("The first ds is " + secondDStore);
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", null, isOffHeap() });
@@ -348,13 +348,13 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_RR",
1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
// kill the vm
vm4.invoke(WANTestBase.class, "killSender", new Object[] {});
vm5.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed the sender. ");
+ LogWriterUtils.getLogWriter().info("Killed the sender. ");
// restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -362,11 +362,11 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm4.invoke(WANTestBase.class,
"createSenderWithDiskStore", new Object[] { "ln", 2, false,
100, 10, false, true, null, firstDStore, true });
- LogWriterSupport.getLogWriter().info("Created the sender.... in vm4 ");
+ LogWriterUtils.getLogWriter().info("Created the sender.... in vm4 ");
vm5.invoke(WANTestBase.class,
"createSenderWithDiskStore", new Object[] { "ln", 2, false,
100, 10, false, true, null, secondDStore, true });
- LogWriterSupport.getLogWriter().info("Created the sender.... in vm5 ");
+ LogWriterUtils.getLogWriter().info("Created the sender.... in vm5 ");
vm4.invoke(WANTestBase.class, "createPersistentReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", isOffHeap() });
@@ -376,10 +376,10 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "startSender",
new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 4");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 4");
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 5");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 5");
try {
inv1.join();
} catch (InterruptedException e) {
@@ -445,7 +445,7 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_RR",
1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
// verify if the queue has all the events
// vm4.invoke(WANTestBase.class, "checkQueueSize", new Object[] { "ln", 1000
@@ -462,22 +462,22 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm4.invoke(WANTestBase.class, "killSender", new Object[] {});
vm5.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed the sender. ");
+ LogWriterUtils.getLogWriter().info("Killed the sender. ");
// restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm4.invoke(WANTestBase.class, "createSender", new Object[] {
"ln", 2, false, 100, 10, false, false, null, true});
- LogWriterSupport.getLogWriter().info("Creted the sender.... in vm4 ");
+ LogWriterUtils.getLogWriter().info("Creted the sender.... in vm4 ");
vm5.invoke(WANTestBase.class, "createSender", new Object[] {
"ln", 2, false, 100, 10, false, false, null, true});
- LogWriterSupport.getLogWriter().info("Creted the sender.... in vm5 ");
+ LogWriterUtils.getLogWriter().info("Creted the sender.... in vm5 ");
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 4");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 4");
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 5");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 5");
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPersistentReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", isOffHeap() });
@@ -529,8 +529,8 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
"createSenderWithDiskStore", new Object[] { "ln", 2, false,
100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The first ds is " + firstDStore);
- LogWriterSupport.getLogWriter().info("The first ds is " + secondDStore);
+ LogWriterUtils.getLogWriter().info("The first ds is " + firstDStore);
+ LogWriterUtils.getLogWriter().info("The first ds is " + secondDStore);
vm2.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", null, isOffHeap() });
@@ -555,13 +555,13 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_RR",
1000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
// kill the vm
vm4.invoke(WANTestBase.class, "killSender", new Object[] {});
vm5.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed the sender. ");
+ LogWriterUtils.getLogWriter().info("Killed the sender. ");
// restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -569,11 +569,11 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
vm4.invoke(WANTestBase.class,
"createSenderWithDiskStore", new Object[] { "ln", 2, false,
100, 10, false, true, null, firstDStore, true });
- LogWriterSupport.getLogWriter().info("Created the sender.... in vm4 ");
+ LogWriterUtils.getLogWriter().info("Created the sender.... in vm4 ");
vm5.invoke(WANTestBase.class,
"createSenderWithDiskStore", new Object[] { "ln", 2, false,
100, 10, false, true, null, secondDStore, true });
- LogWriterSupport.getLogWriter().info("Created the sender.... in vm5 ");
+ LogWriterUtils.getLogWriter().info("Created the sender.... in vm5 ");
vm4.invoke(WANTestBase.class, "createPersistentReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", isOffHeap() });
@@ -583,10 +583,10 @@ public class SerialWANPersistenceEnabledGatewaySenderDUnitTest extends
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "startSender",
new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 4");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 4");
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started the sender in vm 5");
+ LogWriterUtils.getLogWriter().info("Started the sender in vm 5");
try {
inv1.join();
} catch (InterruptedException e) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPropogationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPropogationDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPropogationDUnitTest.java
index 18d028e..56e8d82 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPropogationDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/serial/SerialWANPropogationDUnitTest.java
@@ -27,7 +27,7 @@ import com.gemstone.gemfire.internal.cache.wan.BatchException70;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
public class SerialWANPropogationDUnitTest extends WANTestBase {
@@ -333,7 +333,7 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
Integer regionSize =
(Integer) vm2.invoke(WANTestBase.class, "getRegionSize", new Object[] {getTestMethodName() + "_RR" });
- LogWriterSupport.getLogWriter().info("Region size on remote is: " + regionSize);
+ LogWriterUtils.getLogWriter().info("Region size on remote is: " + regionSize);
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1310,7 +1310,7 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
vm6.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm7.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
- LogWriterSupport.getLogWriter().info("Started receivers on remote site");
+ LogWriterUtils.getLogWriter().info("Started receivers on remote site");
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1332,7 +1332,7 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started senders on local site");
+ LogWriterUtils.getLogWriter().info("Started senders on local site");
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", isOffHeap() });
@@ -1341,7 +1341,7 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
AsyncInvocation inv1 = vm5.invokeAsync(WANTestBase.class, "doPuts",
new Object[] { getTestMethodName() + "_RR", 10000 });
- LogWriterSupport.getLogWriter().info("Started async puts on local site");
+ LogWriterUtils.getLogWriter().info("Started async puts on local site");
Wait.pause(1000);
Map oldConnectionInfo = (Map)vm4.invoke(WANTestBase.class,
@@ -1349,11 +1349,11 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
assertNotNull(oldConnectionInfo);
String oldServerHost = (String)oldConnectionInfo.get("serverHost");
int oldServerPort = (Integer)oldConnectionInfo.get("serverPort");
- LogWriterSupport.getLogWriter().info("Got sender to receiver connection information");
+ LogWriterUtils.getLogWriter().info("Got sender to receiver connection information");
AsyncInvocation inv2 = vm4.invokeAsync(WANTestBase.class, "killSender");
inv2.join();
- LogWriterSupport.getLogWriter().info("Killed primary sender on local site");
+ LogWriterUtils.getLogWriter().info("Killed primary sender on local site");
Wait.pause(5000);// give some time for vm5 to take primary charge
Map newConnectionInfo = (Map)vm5.invoke(WANTestBase.class,
@@ -1361,11 +1361,11 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
assertNotNull(newConnectionInfo);
String newServerHost = (String)newConnectionInfo.get("serverHost");
int newServerPort = (Integer)newConnectionInfo.get("serverPort");
- LogWriterSupport.getLogWriter().info("Got new sender to receiver connection information");
+ LogWriterUtils.getLogWriter().info("Got new sender to receiver connection information");
assertEquals(oldServerHost, newServerHost);
assertEquals(oldServerPort, newServerPort);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"Matched the new connection info with old connection info. Receiver affinity verified.");
@@ -1400,7 +1400,7 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
vm6.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
vm7.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
- LogWriterSupport.getLogWriter().info("Started receivers on remote site");
+ LogWriterUtils.getLogWriter().info("Started receivers on remote site");
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -1422,7 +1422,7 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started senders on local site");
+ LogWriterUtils.getLogWriter().info("Started senders on local site");
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", isOffHeap() });
@@ -1431,7 +1431,7 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
AsyncInvocation inv1 = vm5.invokeAsync(WANTestBase.class, "doPuts",
new Object[] { getTestMethodName() + "_RR", 10000 });
- LogWriterSupport.getLogWriter().info("Started async puts on local site");
+ LogWriterUtils.getLogWriter().info("Started async puts on local site");
Wait.pause(1000);
Map oldConnectionInfo = (Map)vm4.invoke(WANTestBase.class,
@@ -1439,36 +1439,36 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
assertNotNull(oldConnectionInfo);
String oldServerHost = (String)oldConnectionInfo.get("serverHost");
int oldServerPort = (Integer)oldConnectionInfo.get("serverPort");
- LogWriterSupport.getLogWriter().info("Got sender to receiver connection information");
+ LogWriterUtils.getLogWriter().info("Got sender to receiver connection information");
// ---------------------------- KILL vm4
// --------------------------------------
AsyncInvocation inv2 = vm4.invokeAsync(WANTestBase.class, "killSender");
inv2.join();
- LogWriterSupport.getLogWriter().info("Killed vm4 (primary sender) on local site");
+ LogWriterUtils.getLogWriter().info("Killed vm4 (primary sender) on local site");
// -----------------------------------------------------------------------------
vm5.invoke(WANTestBase.class, "waitForSenderToBecomePrimary",
new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("vm5 sender has now acquired primary status");
+ LogWriterUtils.getLogWriter().info("vm5 sender has now acquired primary status");
Wait.pause(5000);// give time to process unprocessedEventsMap
// ---------------------------REBUILD vm4
// --------------------------------------
- LogWriterSupport.getLogWriter().info("Rebuilding vm4....");
+ LogWriterUtils.getLogWriter().info("Rebuilding vm4....");
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm4.invoke(WANTestBase.class, "createSender", new Object[] { "ln", 2,
false, 100, 10, false, false, null, true });
vm4.invoke(WANTestBase.class, "startSender", new Object[] { "ln" });
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", isOffHeap() });
- LogWriterSupport.getLogWriter().info("Rebuilt vm4");
+ LogWriterUtils.getLogWriter().info("Rebuilt vm4");
// -----------------------------------------------------------------------------
// --------------------------- KILL vm5
// ----------------------------------------
inv1.join();// once the puts are done, kill vm5
- LogWriterSupport.getLogWriter().info("puts in vm5 are done");
+ LogWriterUtils.getLogWriter().info("puts in vm5 are done");
inv2 = vm5.invokeAsync(WANTestBase.class, "killSender");
inv2.join();
@@ -1481,10 +1481,10 @@ public class SerialWANPropogationDUnitTest extends WANTestBase {
assertNotNull(newConnectionInfo);
String newServerHost = (String)newConnectionInfo.get("serverHost");
int newServerPort = (Integer)newConnectionInfo.get("serverPort");
- LogWriterSupport.getLogWriter().info("Got new sender to receiver connection information");
+ LogWriterUtils.getLogWriter().info("Got new sender to receiver connection information");
assertEquals(oldServerHost, newServerHost);
assertEquals(oldServerPort, newServerPort);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"Matched the new connection info with old connection info. Receiver affinity verified.");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/management/WANManagementDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/management/WANManagementDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/management/WANManagementDUnitTest.java
index 74f5554..675e15b 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/management/WANManagementDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/management/WANManagementDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.management.internal.MBeanJMXAdapter;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -323,7 +323,7 @@ public class WANManagementDUnitTest extends ManagementTestBase {
DistributedSystemMXBean dsBean = service.getDistributedSystemMXBean();
Map<String, Boolean> dsMap = dsBean.viewRemoteClusterStatus();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Ds Map is: " + dsMap
+ "</ExpectedString> ");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/configuration/ClusterConfigurationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/configuration/ClusterConfigurationDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/configuration/ClusterConfigurationDUnitTest.java
index b076db7..43caf90 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/configuration/ClusterConfigurationDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/configuration/ClusterConfigurationDUnitTest.java
@@ -838,8 +838,8 @@ public class ClusterConfigurationDUnitTest extends CliCommandTestBase {
}
protected void executeAndVerifyCommand(String commandString) {
CommandResult cmdResult = executeCommand(commandString);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Command : " + commandString);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Command Result : " + commandResultToString(cmdResult));
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Command : " + commandString);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Command Result : " + commandResultToString(cmdResult));
assertEquals(Status.OK, cmdResult.getStatus());
assertFalse(cmdResult.failedToPersist());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestRemoteClusterDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestRemoteClusterDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestRemoteClusterDUnitTest.java
index ba69123..4d7882f 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestRemoteClusterDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestRemoteClusterDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.management.RegionMXBean;
import com.gemstone.gemfire.management.internal.MBeanJMXAdapter;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -169,7 +169,7 @@ public class TestRemoteClusterDUnitTest extends ManagementTestBase {
.getDistributedSystemMXBean();
assertNotNull(dsBean);
Map<String, Boolean> dsMap = dsBean.viewRemoteClusterStatus();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Ds Map is: " + dsMap.size());
assertNotNull(dsMap);
assertEquals(true, dsMap.size() > 0 ? true : false);
[08/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Invoke.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Invoke.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Invoke.java
index edb742b..5a4ca15 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Invoke.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Invoke.java
@@ -19,6 +19,22 @@ package com.gemstone.gemfire.test.dunit;
import java.util.HashMap;
import java.util.Map;
+/**
+ * <code>Invoke</code> provides static utility methods that allow a
+ * <code>DistributedTest</code> to invoke a <code>SerializableRunnable</code>
+ * or <code>SerializableCallable</code> in a remote test <code>VM</code>.
+ *
+ * These methods can be used directly: <code>Invoke.invokeInEveryVM(...)</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.Invoke.*;
+ * ...
+ * invokeInEveryVM(...);
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ */
public class Invoke {
protected Invoke() {
@@ -28,17 +44,17 @@ public class Invoke {
* Invokes a <code>SerializableRunnable</code> in every VM that
* DUnit knows about.
* <p>
- * Apparently this does NOT include the controller VM.
+ * Note: this does NOT include the controller VM or locator VM.
*
- * @see VM#invoke(Runnable)
+ * @see VM#invoke(SerializableRunnableIF)
*/
- public static void invokeInEveryVM(final SerializableRunnableIF work) {
- for (int h = 0; h < Host.getHostCount(); h++) {
- Host host = Host.getHost(h);
+ public static void invokeInEveryVM(final SerializableRunnableIF runnable) {
+ for (int hostIndex = 0; hostIndex < Host.getHostCount(); hostIndex++) {
+ Host host = Host.getHost(hostIndex);
- for (int v = 0; v < host.getVMCount(); v++) {
- VM vm = host.getVM(v);
- vm.invoke(work);
+ for (int vmIndex = 0; vmIndex < host.getVMCount(); vmIndex++) {
+ VM vm = host.getVM(vmIndex);
+ vm.invoke(runnable);
}
}
}
@@ -47,14 +63,16 @@ public class Invoke {
* Invokes a method in every remote VM that DUnit knows about.
*
* @see VM#invoke(Class, String)
+ * @deprecated Please use {@link #invokeInEveryVM(SerializableRunnableIF)} or another non-deprecated method in <code>Invoke</code> instead.
*/
- public static void invokeInEveryVM(final Class c, final String method) {
- for (int h = 0; h < Host.getHostCount(); h++) {
- Host host = Host.getHost(h);
+ @Deprecated
+ public static void invokeInEveryVM(final Class<?> targetClass, final String targetMethod) {
+ for (int hostIndex = 0; hostIndex < Host.getHostCount(); hostIndex++) {
+ Host host = Host.getHost(hostIndex);
- for (int v = 0; v < host.getVMCount(); v++) {
- VM vm = host.getVM(v);
- vm.invoke(c, method);
+ for (int vmIndex = 0; vmIndex < host.getVMCount(); vmIndex++) {
+ VM vm = host.getVM(vmIndex);
+ vm.invoke(targetClass, targetMethod);
}
}
}
@@ -63,14 +81,15 @@ public class Invoke {
* Invokes a method in every remote VM that DUnit knows about.
*
* @see VM#invoke(Class, String)
+ * @deprecated Please use {@link #invokeInEveryVM(SerializableRunnableIF)} or another non-deprecated method in <code>Invoke</code> instead.
*/
- public static void invokeInEveryVM(final Class c, final String method, final Object[] methodArgs) {
- for (int h = 0; h < Host.getHostCount(); h++) {
- Host host = Host.getHost(h);
+ public static void invokeInEveryVM(final Class<?> targetClass, final String targetMethod, final Object[] methodArgs) {
+ for (int hostIndex = 0; hostIndex < Host.getHostCount(); hostIndex++) {
+ Host host = Host.getHost(hostIndex);
- for (int v = 0; v < host.getVMCount(); v++) {
- VM vm = host.getVM(v);
- vm.invoke(c, method, methodArgs);
+ for (int vmIndex = 0; vmIndex < host.getVMCount(); vmIndex++) {
+ VM vm = host.getVM(vmIndex);
+ vm.invoke(targetClass, targetMethod, methodArgs);
}
}
}
@@ -79,54 +98,63 @@ public class Invoke {
* Invokes a <code>SerializableCallable</code> in every VM that
* DUnit knows about.
*
- * @return a Map of results, where the key is the VM and the value is the result
+ * @return a Map of results, where the key is the VM and the value is the result for that VM
* @see VM#invoke(SerializableCallableIF)
*/
- public static <T> Map<VM, T> invokeInEveryVM(final SerializableCallableIF<T> work) {
+ public static <T> Map<VM, T> invokeInEveryVM(final SerializableCallableIF<T> callable) {
Map<VM, T> ret = new HashMap<VM, T>();
for (int h = 0; h < Host.getHostCount(); h++) {
Host host = Host.getHost(h);
for (int v = 0; v < host.getVMCount(); v++) {
VM vm = host.getVM(v);
- ret.put(vm, vm.invoke(work));
+ ret.put(vm, vm.invoke(callable));
}
}
return ret;
}
- public static void invokeInLocator(SerializableRunnableIF work) {
- Host.getLocator().invoke(work);
+ public static void invokeInLocator(final SerializableRunnableIF runnable) {
+ Host.getLocator().invoke(runnable);
}
- public static void invokeRepeatingIfNecessary(final VM vm, final RepeatableRunnable task) {
- vm.invokeRepeatingIfNecessary(task, 0);
+ /**
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} with {@link #invokeInEveryVM(SerializableCallableIF)} instead.
+ */
+ public static void invokeRepeatingIfNecessary(final VM vm, final RepeatableRunnable runnable) {
+ vm.invokeRepeatingIfNecessary(runnable, 0);
}
- public static void invokeRepeatingIfNecessary(final VM vm, final RepeatableRunnable task, final long repeatTimeoutMs) {
- vm.invokeRepeatingIfNecessary(task, repeatTimeoutMs);
+ /**
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} with {@link #invokeInEveryVM(SerializableCallableIF)} instead.
+ */
+ public static void invokeRepeatingIfNecessary(final VM vm, final RepeatableRunnable runnable, final long repeatTimeoutMs) {
+ vm.invokeRepeatingIfNecessary(runnable, repeatTimeoutMs);
}
- public static void invokeInEveryVMRepeatingIfNecessary(final RepeatableRunnable work) {
- Invoke.invokeInEveryVMRepeatingIfNecessary(work, 0);
+ /**
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} with {@link #invokeInEveryVM(SerializableCallableIF)} instead.
+ */
+ public static void invokeInEveryVMRepeatingIfNecessary(final RepeatableRunnable runnable) {
+ Invoke.invokeInEveryVMRepeatingIfNecessary(runnable, 0);
}
/**
* Invokes a <code>SerializableRunnable</code> in every VM that
- * DUnit knows about. If work.run() throws an assertion failure,
+ * DUnit knows about. If <code>run()</code> throws an assertion failure,
* its execution is repeated, until no assertion failure occurs or
- * repeatTimeout milliseconds have passed.
+ * <code>repeatTimeoutMs</code> milliseconds have passed.
*
- * @see VM#invoke(SerializableRunnableIF)
+ * @see VM#invoke(RepeatableRunnable)
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} with {@link #invokeInEveryVM(SerializableCallableIF)} instead.
*/
- public static void invokeInEveryVMRepeatingIfNecessary(final RepeatableRunnable work, final long repeatTimeoutMs) {
+ public static void invokeInEveryVMRepeatingIfNecessary(final RepeatableRunnable runnable, final long repeatTimeoutMs) {
for (int h = 0; h < Host.getHostCount(); h++) {
Host host = Host.getHost(h);
for (int v = 0; v < host.getVMCount(); v++) {
VM vm = host.getVM(v);
- vm.invokeRepeatingIfNecessary(work, repeatTimeoutMs);
+ vm.invokeRepeatingIfNecessary(runnable, repeatTimeoutMs);
}
}
}
-
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterSupport.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterSupport.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterSupport.java
deleted file mode 100755
index b804845..0000000
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterSupport.java
+++ /dev/null
@@ -1,72 +0,0 @@
-package com.gemstone.gemfire.test.dunit;
-
-import java.util.Properties;
-
-import org.apache.logging.log4j.Logger;
-
-import com.gemstone.gemfire.LogWriter;
-import com.gemstone.gemfire.distributed.internal.DistributionConfig;
-import com.gemstone.gemfire.distributed.internal.DistributionConfigImpl;
-import com.gemstone.gemfire.internal.logging.InternalLogWriter;
-import com.gemstone.gemfire.internal.logging.LogService;
-import com.gemstone.gemfire.internal.logging.LogWriterFactory;
-import com.gemstone.gemfire.internal.logging.ManagerLogWriter;
-import com.gemstone.gemfire.internal.logging.log4j.LogWriterLogger;
-
-public class LogWriterSupport {
-
- private static final Logger logger = LogService.getLogger();
- private static final LogWriterLogger oldLogger = LogWriterLogger.create(logger);
-
- /**
- * Returns a <code>LogWriter</code> for logging information
- * @deprecated Use a static logger from the log4j2 LogService.getLogger instead.
- */
- @Deprecated
- public static InternalLogWriter getLogWriter() {
- return LogWriterSupport.oldLogger;
- }
-
- /**
- * Creates a new LogWriter and adds it to the config properties. The config
- * can then be used to connect to DistributedSystem, thus providing early
- * access to the LogWriter before connecting. This call does not connect
- * to the DistributedSystem. It simply creates and returns the LogWriter
- * that will eventually be used by the DistributedSystem that connects using
- * config.
- *
- * @param config the DistributedSystem config properties to add LogWriter to
- * @return early access to the DistributedSystem LogWriter
- */
- public static LogWriter createLogWriter(Properties config) { // TODO:LOG:CONVERT: this is being used for ExpectedExceptions
- Properties nonDefault = config;
- if (nonDefault == null) {
- nonDefault = new Properties();
- }
- DistributedTestSupport.addHydraProperties(nonDefault);
-
- DistributionConfig dc = new DistributionConfigImpl(nonDefault);
- LogWriter logger = LogWriterFactory.createLogWriterLogger(
- false/*isLoner*/, false/*isSecurityLog*/, dc,
- false);
-
- // if config was non-null, then these will be added to it...
- nonDefault.put(DistributionConfig.LOG_WRITER_NAME, logger);
-
- return logger;
- }
-
- /**
- * This finds the log level configured for the test run. It should be used
- * when creating a new distributed system if you want to specify a log level.
- * @return the dunit log-level setting
- */
- public static String getDUnitLogLevel() {
- Properties p = DUnitEnv.get().getDistributedSystemProperties();
- String result = p.getProperty(DistributionConfig.LOG_LEVEL_NAME);
- if (result == null) {
- result = ManagerLogWriter.levelToString(DistributionConfig.DEFAULT_LOG_LEVEL);
- }
- return result;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterUtils.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterUtils.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterUtils.java
new file mode 100755
index 0000000..9ecea61
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/LogWriterUtils.java
@@ -0,0 +1,95 @@
+package com.gemstone.gemfire.test.dunit;
+
+import java.util.Properties;
+
+import org.apache.logging.log4j.Logger;
+
+import com.gemstone.gemfire.LogWriter;
+import com.gemstone.gemfire.distributed.internal.DistributionConfig;
+import com.gemstone.gemfire.distributed.internal.DistributionConfigImpl;
+import com.gemstone.gemfire.internal.logging.InternalLogWriter;
+import com.gemstone.gemfire.internal.logging.LogService;
+import com.gemstone.gemfire.internal.logging.LogWriterFactory;
+import com.gemstone.gemfire.internal.logging.ManagerLogWriter;
+import com.gemstone.gemfire.internal.logging.log4j.LogWriterLogger;
+
+/**
+ * <code>LogWriterUtils</code> provides static utility methods to access a
+ * <code>LogWriter</code> within a test.
+ *
+ * These methods can be used directly: <code>LogWriterUtils.getLogWriter()</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.LogWriterUtils.*;
+ * ...
+ * LogWriter logWriter = getLogWriter();
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ *
+ * @deprecated Please use a <code>Logger</code> from {@link LogService#getLogger()} instead.
+ */
+@Deprecated
+public class LogWriterUtils {
+
+ private static final Logger logger = LogService.getLogger();
+ private static final LogWriterLogger oldLogger = LogWriterLogger.create(logger);
+
+ protected LogWriterUtils() {
+ }
+
+ /**
+ * Returns a <code>LogWriter</code> for logging information
+ *
+ * @deprecated Please use a <code>Logger</code> from {@link LogService#getLogger()} instead.
+ */
+ public static InternalLogWriter getLogWriter() {
+ return LogWriterUtils.oldLogger;
+ }
+
+ /**
+ * Creates a new LogWriter and adds it to the config properties. The config
+ * can then be used to connect to DistributedSystem, thus providing early
+ * access to the LogWriter before connecting. This call does not connect
+ * to the DistributedSystem. It simply creates and returns the LogWriter
+ * that will eventually be used by the DistributedSystem that connects using
+ * config.
+ *
+ * @param properties the DistributedSystem config properties to add LogWriter to
+ * @return early access to the DistributedSystem LogWriter
+ * @deprecated Please use a <code>Logger</code> from {@link LogService#getLogger()} instead.
+ */
+ public static LogWriter createLogWriter(final Properties properties) {
+ Properties nonDefault = properties;
+ if (nonDefault == null) {
+ nonDefault = new Properties();
+ }
+ DistributedTestUtils.addHydraProperties(nonDefault);
+
+ DistributionConfig dc = new DistributionConfigImpl(nonDefault);
+ LogWriter logger = LogWriterFactory.createLogWriterLogger(
+ false/*isLoner*/, false/*isSecurityLog*/, dc,
+ false);
+
+ // if config was non-null, then these will be added to it...
+ nonDefault.put(DistributionConfig.LOG_WRITER_NAME, logger);
+
+ return logger;
+ }
+
+ /**
+ * This finds the log level configured for the test run. It should be used
+ * when creating a new distributed system if you want to specify a log level.
+ *
+ * @return the dunit log-level setting
+ */
+ public static String getDUnitLogLevel() {
+ Properties dsProperties = DUnitEnv.get().getDistributedSystemProperties();
+ String result = dsProperties.getProperty(DistributionConfig.LOG_LEVEL_NAME);
+ if (result == null) {
+ result = ManagerLogWriter.levelToString(DistributionConfig.DEFAULT_LOG_LEVEL);
+ }
+ return result;
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkSupport.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkSupport.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkSupport.java
deleted file mode 100755
index cff1707..0000000
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkSupport.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.gemstone.gemfire.test.dunit;
-
-import java.net.UnknownHostException;
-
-import com.gemstone.gemfire.internal.SocketCreator;
-
-public class NetworkSupport {
-
- /** get the IP literal name for the current host, use this instead of
- * "localhost" to avoid IPv6 name resolution bugs in the JDK/machine config.
- * @return an ip literal, this method honors java.net.preferIPvAddresses
- */
- public static String getIPLiteral() {
- try {
- return SocketCreator.getLocalHost().getHostAddress();
- } catch (UnknownHostException e) {
- throw new Error("problem determining host IP address", e);
- }
- }
-
- /** get the host name to use for a server cache in client/server dunit
- * testing
- * @param host
- * @return the host name
- */
- public static String getServerHostName(Host host) {
- return System.getProperty("gemfire.server-bind-address") != null?
- System.getProperty("gemfire.server-bind-address")
- : host.getHostName();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkUtils.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkUtils.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkUtils.java
new file mode 100755
index 0000000..d83aecd
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/NetworkUtils.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.test.dunit;
+
+import java.net.UnknownHostException;
+
+import com.gemstone.gemfire.internal.SocketCreator;
+
+/**
+ * <code>NetworkUtils</code> provides static utility methods to perform
+ * network DNS lookups or similar actions.
+ *
+ * These methods can be used directly: <code>NetworkUtils.getIPLiteral()</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.NetworkUtils.*;
+ * ...
+ * String hostName = getIPLiteral();
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ */
+public class NetworkUtils {
+
+ protected NetworkUtils() {
+ }
+
+ /**
+ * Get the IP literal name for the current host. Use this instead of
+ * "localhost" to avoid IPv6 name resolution bugs in the JDK/machine config.
+ * This method honors java.net.preferIPv6Addresses
+ *
+ * @return an IP literal which honors java.net.preferIPv6Addresses
+ */
+ public static String getIPLiteral() {
+ try {
+ return SocketCreator.getLocalHost().getHostAddress();
+ } catch (UnknownHostException e) {
+ throw new Error("Problem determining host IP address", e);
+ }
+ }
+
+ /**
+ * Get the host name to use for a server cache in client/server dunit
+ * testing.
+ *
+ * @param host the dunit Host to get a machine host name for
+ * @return the host name
+ */
+ public static String getServerHostName(final Host host) {
+ String serverBindAddress = System.getProperty("gemfire.server-bind-address");
+ return serverBindAddress != null ? serverBindAddress : host.getHostName();
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RMIException.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RMIException.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RMIException.java
index 8a555d2..1a5fac4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RMIException.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RMIException.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.GemFireException;
* see hydra.RemoteTestModuleIF
*
* @author David Whitlock
- *
*/
+@SuppressWarnings("serial")
public class RMIException extends GemFireException {
/** SHADOWED FIELD that holds the cause exception (as opposed to the
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RepeatableRunnable.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RepeatableRunnable.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RepeatableRunnable.java
index 32e4369..9695c32 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RepeatableRunnable.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/RepeatableRunnable.java
@@ -20,8 +20,10 @@ package com.gemstone.gemfire.test.dunit;
* A RepeatableRunnable is an object that implements a method that
* can be invoked repeatably without causing any side affects.
*
- * @author dmonnie
+ * @author dmonnie
+ * @deprecated Please use SerializableRunnable with {@link com.jayway.awaitility.Awaitility} instead.
*/
+@Deprecated
public interface RepeatableRunnable {
public void runRepeatingIfNecessary(long repeatTimeoutMs);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableCallableIF.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableCallableIF.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableCallableIF.java
index c3d3ae7..ddeb71e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableCallableIF.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableCallableIF.java
@@ -19,6 +19,8 @@ package com.gemstone.gemfire.test.dunit;
import java.io.Serializable;
import java.util.concurrent.Callable;
+/**
+ * Interface for {@link SerializableCallable} to enable use with lambdas.
+ */
public interface SerializableCallableIF<T> extends Serializable, Callable<T> {
-
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnable.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnable.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnable.java
index 658924a..353cdc7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnable.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnable.java
@@ -46,8 +46,7 @@ import java.io.Serializable;
* }
* </PRE>
*/
-public abstract class SerializableRunnable
- implements SerializableRunnableIF {
+public abstract class SerializableRunnable implements SerializableRunnableIF {
private static final long serialVersionUID = 7584289978241650456L;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnableIF.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnableIF.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnableIF.java
index 648e4f8..5e5467d 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnableIF.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/SerializableRunnableIF.java
@@ -18,6 +18,8 @@ package com.gemstone.gemfire.test.dunit;
import java.io.Serializable;
+/**
+ * Interface for {@link SerializableRunnable} to enable use with lambdas.
+ */
public interface SerializableRunnableIF extends Serializable, Runnable {
-
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/StoppableWaitCriterion.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/StoppableWaitCriterion.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/StoppableWaitCriterion.java
index d90917e..b7be9c5 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/StoppableWaitCriterion.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/StoppableWaitCriterion.java
@@ -16,10 +16,20 @@
*/
package com.gemstone.gemfire.test.dunit;
+/**
+ * Defines an asynchronous criterion with an optional method to fail early
+ * before timeout.
+ *
+ * Extracted from DistributedTestCase.
+ *
+ * @deprecated Use {@link com.jayway.awaitility.Awaitility} instead.
+ */
public interface StoppableWaitCriterion extends WaitCriterion {
+
/**
* If this method returns true then quit waiting even if we are not done.
* This allows a wait to fail early.
*/
public boolean stopWaiting();
-}
\ No newline at end of file
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/ThreadUtils.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/ThreadUtils.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/ThreadUtils.java
new file mode 100755
index 0000000..6ba87ed
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/ThreadUtils.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.test.dunit;
+
+import static org.junit.Assert.fail;
+import static com.gemstone.gemfire.test.dunit.Jitter.*;
+
+import org.apache.logging.log4j.Logger;
+
+import com.gemstone.gemfire.internal.OSProcess;
+import com.gemstone.gemfire.internal.logging.LogService;
+
+/**
+ * <code>ThreadUtils</code> provides static utility methods to perform
+ * thread-related actions such as dumping thread stacks.
+ *
+ * These methods can be used directly: <code>ThreadUtils.dumpAllStacks()</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.ThreadUtils.*;
+ * ...
+ * dumpAllStacks();
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ */
+public class ThreadUtils {
+
+ private static final Logger logger = LogService.getLogger();
+
+ protected ThreadUtils() {
+ }
+
+ /**
+ * Print stack dumps for all vms.
+ *
+ * @author bruce
+ * @since 5.0
+ */
+ public static void dumpAllStacks() {
+ for (int h=0; h < Host.getHostCount(); h++) {
+ dumpStack(Host.getHost(h));
+ }
+ }
+
+ /**
+ * Dump stacks for all threads in the current VM.
+ */
+ public static void dumpMyThreads() {
+ OSProcess.printStacks(0, false);
+ }
+
+ /**
+ * Print a stack dump for this vm.
+ *
+ * @author bruce
+ * @since 5.0
+ */
+ public static void dumpStack() {
+ OSProcess.printStacks(0, false);
+ }
+
+ /**
+ * Print stack dumps for all vms on the given host.
+ *
+ * @author bruce
+ * @since 5.0
+ */
+ public static void dumpStack(final Host host) {
+ for (int v=0; v < host.getVMCount(); v++) {
+ host.getVM(v).invoke(com.gemstone.gemfire.test.dunit.DistributedTestCase.class, "dumpStack");
+ }
+ }
+
+ /**
+ * Print a stack dump for the given vm.
+ *
+ * @author bruce
+ * @since 5.0
+ */
+ public static void dumpStack(final VM vm) {
+ vm.invoke(com.gemstone.gemfire.test.dunit.DistributedTestCase.class, "dumpStack");
+ }
+
+ public static void dumpStackTrace(final Thread thread, final StackTraceElement[] stackTrace) {
+ StringBuilder msg = new StringBuilder();
+ msg.append("Thread=<")
+ .append(thread)
+ .append("> stackDump:\n");
+ for (int i=0; i < stackTrace.length; i++) {
+ msg.append("\t")
+ .append(stackTrace[i])
+ .append("\n");
+ }
+ logger.info(msg.toString());
+ }
+
+ /**
+ * Wait for a thread to join.
+ *
+ * @param thread thread to wait on
+ * @param timeoutMilliseconds maximum time to wait
+ * @throws AssertionError if the thread does not terminate
+ */
+ public static void join(final Thread thread, final long timeoutMilliseconds) {
+ final long tilt = System.currentTimeMillis() + timeoutMilliseconds;
+ final long incrementalWait = jitterInterval(timeoutMilliseconds);
+ final long start = System.currentTimeMillis();
+ for (;;) {
+ // I really do *not* understand why this check is necessary
+ // but it is, at least with JDK 1.6. According to the source code
+ // and the javadocs, one would think that join() would exit immediately
+ // if the thread is dead. However, I can tell you from experimentation
+ // that this is not the case. :-( djp 2008-12-08
+ if (!thread.isAlive()) {
+ break;
+ }
+ try {
+ thread.join(incrementalWait);
+ } catch (InterruptedException e) {
+ fail("interrupted");
+ }
+ if (System.currentTimeMillis() >= tilt) {
+ break;
+ }
+ } // for
+ if (thread.isAlive()) {
+ logger.info("HUNG THREAD");
+ ThreadUtils.dumpStackTrace(thread, thread.getStackTrace());
+ ThreadUtils.dumpMyThreads();
+ thread.interrupt(); // We're in trouble!
+ fail("Thread did not terminate after " + timeoutMilliseconds + " ms: " + thread);
+ }
+ long elapsedMs = (System.currentTimeMillis() - start);
+ if (elapsedMs > 0) {
+ String msg = "Thread " + thread + " took " + elapsedMs + " ms to exit.";
+ logger.info(msg);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Threads.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Threads.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Threads.java
deleted file mode 100755
index ff0b5ef..0000000
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Threads.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.gemstone.gemfire.test.dunit;
-
-import static org.junit.Assert.fail;
-
-import org.apache.logging.log4j.Logger;
-
-import com.gemstone.gemfire.LogWriter;
-import com.gemstone.gemfire.internal.OSProcess;
-import com.gemstone.gemfire.internal.logging.LocalLogWriter;
-import com.gemstone.gemfire.internal.logging.LogService;
-import com.gemstone.gemfire.internal.logging.LogWriterImpl;
-
-public class Threads {
- private static final Logger logger = LogService.getLogger();
-
- /**
- * Wait for a thread to join
- * @param t thread to wait on
- * @param ms maximum time to wait
- * @throws AssertionError if the thread does not terminate
- */
- static public void join(Thread t, long ms, LogWriter logger) {
- final long tilt = System.currentTimeMillis() + ms;
- final long incrementalWait = Jitter.jitterInterval(ms);
- final long start = System.currentTimeMillis();
- for (;;) {
- // I really do *not* understand why this check is necessary
- // but it is, at least with JDK 1.6. According to the source code
- // and the javadocs, one would think that join() would exit immediately
- // if the thread is dead. However, I can tell you from experimentation
- // that this is not the case. :-( djp 2008-12-08
- if (!t.isAlive()) {
- break;
- }
- try {
- t.join(incrementalWait);
- } catch (InterruptedException e) {
- fail("interrupted");
- }
- if (System.currentTimeMillis() >= tilt) {
- break;
- }
- } // for
- if (logger == null) {
- logger = new LocalLogWriter(LogWriterImpl.INFO_LEVEL, System.out);
- }
- if (t.isAlive()) {
- logger.info("HUNG THREAD");
- Threads.dumpStackTrace(t, t.getStackTrace(), logger);
- Threads.dumpMyThreads(logger);
- t.interrupt(); // We're in trouble!
- fail("Thread did not terminate after " + ms + " ms: " + t);
- // getLogWriter().warning("Thread did not terminate"
- // /* , new Exception()*/
- // );
- }
- long elapsedMs = (System.currentTimeMillis() - start);
- if (elapsedMs > 0) {
- String msg = "Thread " + t + " took "
- + elapsedMs
- + " ms to exit.";
- logger.info(msg);
- }
- }
-
- public static void dumpStackTrace(Thread t, StackTraceElement[] stack, LogWriter logger) {
- StringBuilder msg = new StringBuilder();
- msg.append("Thread=<")
- .append(t)
- .append("> stackDump:\n");
- for (int i=0; i < stack.length; i++) {
- msg.append("\t")
- .append(stack[i])
- .append("\n");
- }
- logger.info(msg.toString());
- }
-
- /**
- * Dump all thread stacks
- */
- public static void dumpMyThreads(LogWriter logger) {
- OSProcess.printStacks(0, false);
- }
-
- /** print a stack dump for this vm
- @author bruce
- @since 5.0
- */
- public static void dumpStack() {
- com.gemstone.gemfire.internal.OSProcess.printStacks(0, false);
- }
-
- /** print a stack dump for the given vm
- @author bruce
- @since 5.0
- */
- public static void dumpStack(VM vm) {
- vm.invoke(com.gemstone.gemfire.test.dunit.DistributedTestCase.class, "dumpStack");
- }
-
- /** print stack dumps for all vms on the given host
- @author bruce
- @since 5.0
- */
- public static void dumpStack(Host host) {
- for (int v=0; v < host.getVMCount(); v++) {
- host.getVM(v).invoke(com.gemstone.gemfire.test.dunit.DistributedTestCase.class, "dumpStack");
- }
- }
-
- /** print stack dumps for all vms
- @author bruce
- @since 5.0
- */
- public static void dumpAllStacks() {
- for (int h=0; h < Host.getHostCount(); h++) {
- dumpStack(Host.getHost(h));
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/VM.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/VM.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/VM.java
index d8bbd21..db3e302 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/VM.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/VM.java
@@ -18,9 +18,9 @@ package com.gemstone.gemfire.test.dunit;
import java.io.File;
import java.io.PrintWriter;
+import java.io.Serializable;
import java.io.StringWriter;
import java.rmi.RemoteException;
-import java.util.concurrent.Callable;
import com.gemstone.gemfire.test.dunit.standalone.BounceResult;
import com.gemstone.gemfire.test.dunit.standalone.RemoteDUnitVMIF;
@@ -31,9 +31,9 @@ import hydra.MethExecutorResult;
* This class represents a Java Virtual Machine that runs on a host.
*
* @author David Whitlock
- *
*/
-public class VM implements java.io.Serializable {
+@SuppressWarnings("serial")
+public class VM implements Serializable {
/** The host on which this VM runs */
private Host host;
@@ -53,7 +53,7 @@ public class VM implements java.io.Serializable {
* Creates a new <code>VM</code> that runs on a given host with a
* given process id.
*/
- public VM(Host host, int pid, RemoteDUnitVMIF client) {
+ public VM(final Host host, final int pid, final RemoteDUnitVMIF client) {
this.host = host;
this.pid = pid;
this.client = client;
@@ -83,7 +83,7 @@ public class VM implements java.io.Serializable {
* <code>void</code> return type in this VM. If the return type of
* the method is <code>void</code>, <code>null</code> is returned.
*
- * @param c
+ * @param targetClass
* The class on which to invoke the method
* @param methodName
* The name of the method to invoke
@@ -92,8 +92,8 @@ public class VM implements java.io.Serializable {
* An exception occurred on while invoking the method in
* this VM
*/
- public Object invoke(Class c, String methodName) {
- return invoke(c, methodName, new Object[0]);
+ public Object invoke(final Class targetClass, final String methodName) {
+ return invoke(targetClass, methodName, new Object[0]);
}
/**
@@ -102,13 +102,13 @@ public class VM implements java.io.Serializable {
* return type of the method is <code>void</code>, <code>null</code>
* is returned.
*
- * @param c
+ * @param targetClass
* The class on which to invoke the method
* @param methodName
* The name of the method to invoke
*/
- public AsyncInvocation invokeAsync(Class c, String methodName) {
- return invokeAsync(c, methodName, null);
+ public AsyncInvocation invokeAsync(final Class targetClass, final String methodName) {
+ return invokeAsync(targetClass, methodName, null);
}
/**
@@ -116,7 +116,7 @@ public class VM implements java.io.Serializable {
* <code>void</code> return type in this VM. If the return type of
* the method is <code>void</code>, <code>null</code> is returned.
*
- * @param c
+ * @param targetClass
* The class on which to invoke the method
* @param methodName
* The name of the method to invoke
@@ -128,17 +128,17 @@ public class VM implements java.io.Serializable {
* An exception occurred on while invoking the method in
* this VM
*/
- public Object invoke(Class c, String methodName, Object[] args) {
+ public Object invoke(Class targetClass, String methodName, Object[] args) {
if (!this.available) {
String s = "VM not available: " + this;
- throw new RMIException(this, c.getName(), methodName,
+ throw new RMIException(this, targetClass.getName(), methodName,
new IllegalStateException(s));
}
MethExecutorResult result = null;
int retryCount = 120;
do {
try {
- result = this.client.executeMethodOnClass(c.getName(), methodName, args);
+ result = this.client.executeMethodOnClass(targetClass.getName(), methodName, args);
break; // out of while loop
} catch( RemoteException e ) {
boolean isWindows = false;
@@ -157,7 +157,7 @@ public class VM implements java.io.Serializable {
}
}
} else {
- throw new RMIException(this, c.getName(), methodName, e );
+ throw new RMIException(this, targetClass.getName(), methodName, e );
}
}
} while (true);
@@ -167,7 +167,7 @@ public class VM implements java.io.Serializable {
} else {
Throwable thr = result.getException();
- throw new RMIException(this, c.getName(), methodName, thr,
+ throw new RMIException(this, targetClass.getName(), methodName, thr,
result.getStackTrace());
}
}
@@ -177,7 +177,7 @@ public class VM implements java.io.Serializable {
* <code>void</code> return type in this VM. If the return type of
* the method is <code>void</code>, <code>null</code> is returned.
*
- * @param c
+ * @param targetClass
* The class on which to invoke the method
* @param methodName
* The name of the method to invoke
@@ -185,13 +185,13 @@ public class VM implements java.io.Serializable {
* Arguments passed to the method call (must be {@link
* java.io.Serializable}).
*/
- public AsyncInvocation invokeAsync(final Class c,
+ public AsyncInvocation invokeAsync(final Class targetClass,
final String methodName,
final Object[] args) {
AsyncInvocation ai =
- new AsyncInvocation(c, methodName, new Runnable() {
+ new AsyncInvocation(targetClass, methodName, new Runnable() {
public void run() {
- final Object o = invoke(c, methodName, args);
+ final Object o = invoke(targetClass, methodName, args);
AsyncInvocation.setReturnValue(o);
}
});
@@ -282,12 +282,14 @@ public class VM implements java.io.Serializable {
}
/**
- * Invokes the <code>run</code method of a {@link Runnable} in this
+ * Invokes the <code>run</code> method of a {@link Runnable} in this
* VM. If the invocation throws AssertionFailedError, and repeatTimeoutMs
* is >0, the <code>run</code> method is invoked repeatedly until it
* either succeeds, or repeatTimeoutMs has passed. The AssertionFailedError
* is thrown back to the sender of this method if <code>run</code> has not
* completed successfully before repeatTimeoutMs has passed.
+ *
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} with {@link #invoke(SerializableCallableIF)} instead.
*/
public void invokeRepeatingIfNecessary(RepeatableRunnable o, long repeatTimeoutMs) {
invoke(o, "runRepeatingIfNecessary", new Object[] {new Long(repeatTimeoutMs)});
@@ -374,15 +376,15 @@ public class VM implements java.io.Serializable {
/**
* Invokes the <code>main</code> method of a given class
*
- * @param c
+ * @param targetClass
* The class on which to invoke the <code>main</code> method
* @param args
* The "command line" arguments to pass to the
* <code>main</code> method
*/
- public void invokeMain(Class c, String[] args) {
+ public void invokeMain(Class targetClass, String[] args) {
Object[] stupid = new Object[] { args };
- invoke(c, "main", stupid);
+ invoke(targetClass, "main", stupid);
}
/**
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Wait.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Wait.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Wait.java
index 55791e9..3e218df 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Wait.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Wait.java
@@ -17,101 +17,58 @@
package com.gemstone.gemfire.test.dunit;
import static org.junit.Assert.fail;
+import static com.gemstone.gemfire.test.dunit.Jitter.*;
import org.apache.logging.log4j.Logger;
-import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.logging.LogService;
-import com.jayway.awaitility.Awaitility;
+/**
+ * <code>Wait</code> provides static utility methods to wait for some
+ * asynchronous action with intermittent polling.
+ *
+ * These methods can be used directly: <code>Wait.waitForCriterion(...)</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.Wait.*;
+ * ...
+ * waitForCriterion(...);
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ *
+ * @deprecated Use {@link com.jayway.awaitility.Awaitility} instead.
+ */
+@Deprecated
public class Wait {
+
private static final Logger logger = LogService.getLogger();
- /**
- * Wait until given criterion is met
- * @param ev criterion to wait on
- * @param ms total time to wait, in milliseconds
- * @param interval pause interval between waits
- * @param throwOnTimeout if false, don't generate an error
- * @deprecated Use {@link Awaitility} instead.
- */
- @Deprecated
- static public void waitForCriterion(WaitCriterion ev, long ms,
- long interval, boolean throwOnTimeout) {
- long waitThisTime = Jitter.jitterInterval(interval);
- final long tilt = System.currentTimeMillis() + ms;
- for (;;) {
- // getLogWriter().info("Testing to see if event has occurred: " + ev.description());
- if (ev.done()) {
- return; // success
- }
- if (ev instanceof StoppableWaitCriterion) {
- StoppableWaitCriterion ev2 = (StoppableWaitCriterion)ev;
- if (ev2.stopWaiting()) {
- if (throwOnTimeout) {
- fail("stopWaiting returned true: " + ev.description());
- }
- return;
- }
- }
+ protected Wait() {
+ }
- // Calculate time left
- long timeLeft = tilt - System.currentTimeMillis();
- if (timeLeft <= 0) {
- if (!throwOnTimeout) {
- return; // not an error, but we're done
- }
- fail("Event never occurred after " + ms + " ms: " + ev.description());
- }
-
- if (waitThisTime > timeLeft) {
- waitThisTime = timeLeft;
- }
-
- // Wait a little bit
- Thread.yield();
- try {
- // getLogWriter().info("waiting " + waitThisTime + "ms for " + ev.description());
- Thread.sleep(waitThisTime);
- } catch (InterruptedException e) {
- fail("interrupted");
- }
- }
- }
-
/**
- * Blocks until the clock used for expiration moves forward.
- * @param baseTime the timestamp that the clock must exceed
- * @return the last time stamp observed
+ * Pause for a default interval (250 milliseconds).
+ *
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} instead.
*/
- public static final long waitForExpiryClockToChange(LocalRegion lr, final long baseTime) {
- long nowTime;
- do {
- Thread.yield();
- nowTime = lr.cacheTimeMillis();
- } while ((nowTime - baseTime) <= 0L);
- return nowTime;
+ public static void pause() {
+ pause(250);
}
/**
- * Blocks until the clock used for expiration moves forward.
- * @return the last time stamp observed
+ * Pause for the specified milliseconds. Make sure system clock has advanced
+ * by the specified number of millis before returning.
+ *
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} instead.
*/
- public static final long waitForExpiryClockToChange(LocalRegion lr) {
- return waitForExpiryClockToChange(lr, lr.cacheTimeMillis());
- }
-
- /** pause for specified ms interval
- * Make sure system clock has advanced by the specified number of millis before
- * returning.
- */
- public static final void pause(int ms) {
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
- if (ms >= 1000 || log.fineEnabled()) { // check for fine but log at info
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Pausing for " + ms + " ms..."/*, new Exception()*/);
+ public static final void pause(final int milliseconds) {
+ if (milliseconds >= 1000 || logger.isDebugEnabled()) { // check for debug but log at info
+ logger.info("Pausing for {} ms...", milliseconds);
}
- final long target = System.currentTimeMillis() + ms;
+ final long target = System.currentTimeMillis() + milliseconds;
try {
for (;;) {
long msLeft = target - System.currentTimeMillis();
@@ -125,23 +82,102 @@ public class Wait {
Assert.fail("interrupted", e);
}
}
+
+ /**
+ * Wait until given criterion is met
+ *
+ * @param waitCriterion criterion to wait on
+ * @param timeoutMillis total time to wait, in milliseconds
+ * @param pollingInterval pause interval between waits
+ * @param throwOnTimeout if false, don't generate an error
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} instead.
+ */
+ @Deprecated
+ public static void waitForCriterion(final WaitCriterion waitCriterion, final long timeoutMillis, final long pollingInterval, final boolean throwOnTimeout) {
+ long waitThisTime = jitterInterval(pollingInterval);
+ final long tilt = System.currentTimeMillis() + timeoutMillis;
+ for (;;) {
+ if (waitCriterion.done()) {
+ return; // success
+ }
+ if (waitCriterion instanceof StoppableWaitCriterion) {
+ StoppableWaitCriterion ev2 = (StoppableWaitCriterion)waitCriterion;
+ if (ev2.stopWaiting()) {
+ if (throwOnTimeout) {
+ fail("stopWaiting returned true: " + waitCriterion.description());
+ }
+ return;
+ }
+ }
+
+ // Calculate time left
+ long timeLeft = tilt - System.currentTimeMillis();
+ if (timeLeft <= 0) {
+ if (!throwOnTimeout) {
+ return; // not an error, but we're done
+ }
+ fail("Event never occurred after " + timeoutMillis + " ms: " + waitCriterion.description());
+ }
+
+ if (waitThisTime > timeLeft) {
+ waitThisTime = timeLeft;
+ }
+
+ // Wait a little bit
+ Thread.yield();
+ try {
+ Thread.sleep(waitThisTime);
+ } catch (InterruptedException e) {
+ fail("interrupted");
+ }
+ }
+ }
+
+ /**
+ * Blocks until the clock used for expiration moves forward.
+ *
+ * @param cacheTimeMillisSource region that provides cacheTimeMillis
+ * @return the last time stamp observed
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} instead.
+ */
+ public static final long waitForExpiryClockToChange(final LocalRegion cacheTimeMillisSource) {
+ return waitForExpiryClockToChange(cacheTimeMillisSource, cacheTimeMillisSource.cacheTimeMillis());
+ }
+
+ /**
+ * Blocks until the clock used for expiration moves forward.
+ *
+ * @param cacheTimeMillisSource region that provides cacheTimeMillis
+ * @param baseTime the timestamp that the clock must exceed
+ * @return the last time stamp observed
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} instead.
+ */
+ public static final long waitForExpiryClockToChange(final LocalRegion cacheTimeMillisSource, final long baseTime) {
+ long nowTime;
+ do {
+ Thread.yield();
+ nowTime = cacheTimeMillisSource.cacheTimeMillis();
+ } while ((nowTime - baseTime) <= 0L);
+ return nowTime;
+ }
/**
- * Wait on a mutex. This is done in a loop in order to address the
+ * Wait on a mutex. This is done in a loop in order to address the
* "spurious wakeup" "feature" in Java.
- * @param ev condition to test
+ *
+ * @param waitCriterion condition to test
* @param mutex object to lock and wait on
- * @param ms total amount of time to wait
- * @param interval interval to pause for the wait
+ * @param milliseconds total amount of time to wait
+ * @param pollingInterval interval to pause for the wait
* @param throwOnTimeout if false, no error is thrown.
+ * @deprecated Please use {@link com.jayway.awaitility.Awaitility} instead.
*/
- static public void waitMutex(WaitCriterion ev, Object mutex, long ms,
- long interval, boolean throwOnTimeout) {
- final long tilt = System.currentTimeMillis() + ms;
- long waitThisTime = Jitter.jitterInterval(interval);
+ public static void waitMutex(final WaitCriterion waitCriterion, final Object mutex, final long milliseconds, final long pollingInterval, final boolean throwOnTimeout) {
+ final long tilt = System.currentTimeMillis() + milliseconds;
+ long waitThisTime = jitterInterval(pollingInterval);
synchronized (mutex) {
for (;;) {
- if (ev.done()) {
+ if (waitCriterion.done()) {
break;
}
@@ -150,7 +186,7 @@ public class Wait {
if (!throwOnTimeout) {
return; // not an error, but we're done
}
- fail("Event never occurred after " + ms + " ms: " + ev.description());
+ fail("Event never occurred after " + milliseconds + " ms: " + waitCriterion.description());
}
if (waitThisTime > timeLeft) {
@@ -165,38 +201,4 @@ public class Wait {
} // for
} // synchronized
}
-
- /** pause for a default interval */
- public static void pause() {
- pause(250);
- }
-
- /**
- * Use of this function indicates a place in the tests tree where t
- * he use of Thread.sleep() is
- * highly questionable.
- * <p>
- * Some places in the system, especially those that test expirations and other
- * timeouts, have a very good reason to call {@link Thread#sleep(long)}. The
- * <em>other</em> places are marked by the use of this method.
- *
- * @param ms
- */
- static public final void staticPause(int ms) {
- // getLogWriter().info("FIXME: Pausing for " + ms + " ms..."/*, new Exception()*/);
- final long target = System.currentTimeMillis() + ms;
- try {
- for (;;) {
- long msLeft = target - System.currentTimeMillis();
- if (msLeft <= 0) {
- break;
- }
- Thread.sleep(msLeft);
- }
- }
- catch (InterruptedException e) {
- Assert.fail("interrupted", e);
- }
-
- }
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/WaitCriterion.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/WaitCriterion.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/WaitCriterion.java
index f835268..7575f8c 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/WaitCriterion.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/WaitCriterion.java
@@ -16,7 +16,18 @@
*/
package com.gemstone.gemfire.test.dunit;
+/**
+ * Defines an asynchronous criterion to wait for by invoking a method in
+ * {@link Wait}.
+ *
+ * Extracted from DistributedTestCase.
+ *
+ * @deprecated Use {@link com.jayway.awaitility.Awaitility} instead.
+ */
public interface WaitCriterion {
+
public boolean done();
+
public String description();
-}
\ No newline at end of file
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataDUnitTest.java
index fbad416..5850e6f 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataDUnitTest.java
@@ -43,10 +43,10 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -96,7 +96,7 @@ public class CqDataDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
cqDUnitTest.createClient(client, port, host0);
@@ -142,7 +142,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(2);
@@ -259,7 +259,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server, 0, true);
final int port = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
cqDUnitTest.createClient(client, port, host0);
@@ -334,7 +334,7 @@ public class CqDataDUnitTest extends CacheTestCase {
/* Create Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
cqDUnitTest.createClient(client1, port, host0);
cqDUnitTest.createClient(client2, port, host0);
@@ -403,7 +403,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server2, 0, false, MirrorType.KEYS);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
cqDUnitTest.createClient(client, port1, host0);
@@ -451,7 +451,7 @@ public class CqDataDUnitTest extends CacheTestCase {
final int evictionThreshold = 1;
server1.invoke(new CacheSerializableRunnable("Create Cache Server") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
@@ -464,7 +464,7 @@ public class CqDataDUnitTest extends CacheTestCase {
for (int i = 0; i < cqDUnitTest.regions.length; i++) {
Region region = createRegion(cqDUnitTest.regions[i], factory.createRegionAttributes());
// Set CacheListener.
- region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
}
Wait.pause(2000);
@@ -479,7 +479,7 @@ public class CqDataDUnitTest extends CacheTestCase {
});
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
cqDUnitTest.createClient(client, port1, host0);
@@ -521,7 +521,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1, 0, false, MirrorType.KEYS_VALUES);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
final String[] regions = cqDUnitTest.regions;
final int[] serverPorts = new int[] {port1};
@@ -530,7 +530,7 @@ public class CqDataDUnitTest extends CacheTestCase {
SerializableRunnable createClientWithPool =
new CacheSerializableRunnable("createClientWithPool") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
// Initialize CQ Service.
try {
getCache().getQueryService();
@@ -544,7 +544,7 @@ public class CqDataDUnitTest extends CacheTestCase {
ClientServerTestCase.configureConnectionPool(regionFactory, serverHost, serverPorts[0], -1, false, -1, -1, null);
for (int i=0; i < regions.length; i++) {
createRegion(regions[i], regionFactory.create() );
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
}
}
};
@@ -579,7 +579,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1, 0, false, MirrorType.KEYS_VALUES);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
final String[] regions = cqDUnitTest.regions;
final int[] serverPorts = new int[] {port1};
@@ -588,7 +588,7 @@ public class CqDataDUnitTest extends CacheTestCase {
SerializableRunnable createClientWithPool =
new CacheSerializableRunnable("createClientWithPool") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
//Region region1 = null;
// Initialize CQ Service.
try {
@@ -604,7 +604,7 @@ public class CqDataDUnitTest extends CacheTestCase {
for (int i=0; i < regions.length; i++) {
createRegion(regions[i], regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
}
}
};
@@ -634,7 +634,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1, 0, false, MirrorType.KEYS_VALUES);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
final String[] regions = cqDUnitTest.regions;
final int[] serverPorts = new int[] {port1};
@@ -643,7 +643,7 @@ public class CqDataDUnitTest extends CacheTestCase {
SerializableRunnable createClientWithConnectionPool =
new CacheSerializableRunnable("createClientWithConnectionPool") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
//Region region1 = null;
// Initialize CQ Service.
try {
@@ -658,7 +658,7 @@ public class CqDataDUnitTest extends CacheTestCase {
ClientServerTestCase.configureConnectionPool(regionFactory, serverHost, serverPorts[0], -1, true, -1, -1, null);
for (int i=0; i < regions.length; i++) {
createRegion(regions[i], regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
}
}
};
@@ -688,7 +688,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1, 0, false, MirrorType.KEYS_VALUES);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
final String[] regions = cqDUnitTest.regions;
final int[] serverPorts = new int[] {port1};
@@ -697,7 +697,7 @@ public class CqDataDUnitTest extends CacheTestCase {
SerializableRunnable createClientWithPool =
new CacheSerializableRunnable("createClientWithPool") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
//Region region1 = null;
// Initialize CQ Service.
try {
@@ -714,7 +714,7 @@ public class CqDataDUnitTest extends CacheTestCase {
for (int i=0; i < regions.length; i++) {
createRegion(regions[i], regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
}
}
};
@@ -753,7 +753,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
cqDUnitTest.createClient(client, port, host0);
@@ -768,7 +768,7 @@ public class CqDataDUnitTest extends CacheTestCase {
// Test for Event on Region Clear.
server.invoke(new CacheSerializableRunnable("testRegionEvents"){
public void run2()throws CacheException {
- LogWriterSupport.getLogWriter().info("### Clearing the region on the server ###");
+ LogWriterUtils.getLogWriter().info("### Clearing the region on the server ###");
Region region = getCache().getRegion("/root/" + cqDUnitTest.regions[0]);
for (int i = 1; i <=5; i++) {
region.put(CqQueryDUnitTest.KEY+i, new Portfolio(i));
@@ -782,7 +782,7 @@ public class CqDataDUnitTest extends CacheTestCase {
// Test for Event on Region invalidate.
server.invoke(new CacheSerializableRunnable("testRegionEvents"){
public void run2()throws CacheException {
- LogWriterSupport.getLogWriter().info("### Invalidate the region on the server ###");
+ LogWriterUtils.getLogWriter().info("### Invalidate the region on the server ###");
Region region = getCache().getRegion("/root/" + cqDUnitTest.regions[0]);
for (int i = 1; i <=5; i++) {
region.put(CqQueryDUnitTest.KEY+i, new Portfolio(i));
@@ -796,7 +796,7 @@ public class CqDataDUnitTest extends CacheTestCase {
// Test for Event on Region destroy.
server.invoke(new CacheSerializableRunnable("testRegionEvents"){
public void run2()throws CacheException {
- LogWriterSupport.getLogWriter().info("### Destroying the region on the server ###");
+ LogWriterUtils.getLogWriter().info("### Destroying the region on the server ###");
Region region = getCache().getRegion("/root/" + cqDUnitTest.regions[1]);
for (int i = 1; i <=5; i++) {
region.put(CqQueryDUnitTest.KEY+i, new Portfolio(i));
@@ -834,7 +834,7 @@ public class CqDataDUnitTest extends CacheTestCase {
final String cqName = "testEventsDuringQueryExecution_0";
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Initialize Client.
cqDUnitTest.createClient(client, port, host0);
@@ -943,7 +943,7 @@ public class CqDataDUnitTest extends CacheTestCase {
});
//wait for 60 seconds for test to complete
- Threads.join(processCqs, 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(processCqs, 60 * 1000);
// Close.
cqDUnitTest.closeClient(client);
@@ -974,7 +974,7 @@ public class CqDataDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Initialize Client.
cqDUnitTest.createClient(client, port, host0);
@@ -1119,7 +1119,7 @@ public class CqDataDUnitTest extends CacheTestCase {
});
//wait for 60 seconds for test to complete
- Threads.join(processCqs, 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(processCqs, 60 * 1000);
// Close.
cqDUnitTest.closeClient(client);
cqDUnitTest.closeServer(server);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataUsingPoolDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataUsingPoolDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataUsingPoolDUnitTest.java
index 197467e..82d6279 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataUsingPoolDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqDataUsingPoolDUnitTest.java
@@ -62,8 +62,8 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -114,7 +114,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testClientWithFeederAndCQ";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -161,7 +161,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(2);
@@ -280,7 +280,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server, 0, true);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQWithDestroysAndInvalidates";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -359,7 +359,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
/* Create Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName1 = "testCQWithMultipleClients1";
String poolName2 = "testCQWithMultipleClients2";
@@ -435,7 +435,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server2, 0, false, MirrorType.KEYS);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCQWithLoad";
cqDUnitTest.createPool(client, poolName, host0, port1);
@@ -490,7 +490,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
final int evictionThreshold = 5;
server1.invoke(new CacheSerializableRunnable("Create Cache Server") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setMirrorType(MirrorType.NONE);
@@ -500,7 +500,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
for (int i = 0; i < cqDUnitTest.regions.length; i++) {
Region region = createRegion(cqDUnitTest.regions[i], factory.createRegionAttributes());
// Set CacheListener.
- region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
}
Wait.pause(2000);
@@ -517,7 +517,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server2, 0, false, MirrorType.NONE);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCQWithEviction";
cqDUnitTest.createPool(client, poolName, host0, port1);
@@ -598,7 +598,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1, 0, false, MirrorType.KEYS_VALUES);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// final String[] regions = cqDUnitTest.regions;
// final int[] serverPorts = new int[] {port1};
@@ -648,7 +648,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testRegionEvents";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -666,7 +666,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
// Test for Event on Region Clear.
server.invoke(new CacheSerializableRunnable("testRegionEvents"){
public void run2()throws CacheException {
- LogWriterSupport.getLogWriter().info("### Clearing the region on the server ###");
+ LogWriterUtils.getLogWriter().info("### Clearing the region on the server ###");
Region region = getCache().getRegion("/root/" + cqDUnitTest.regions[0]);
for (int i = 1; i <=5; i++) {
region.put(CqQueryUsingPoolDUnitTest.KEY+i, new Portfolio(i));
@@ -680,7 +680,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
// Test for Event on Region invalidate.
server.invoke(new CacheSerializableRunnable("testRegionEvents"){
public void run2()throws CacheException {
- LogWriterSupport.getLogWriter().info("### Invalidate the region on the server ###");
+ LogWriterUtils.getLogWriter().info("### Invalidate the region on the server ###");
Region region = getCache().getRegion("/root/" + cqDUnitTest.regions[0]);
for (int i = 1; i <=5; i++) {
region.put(CqQueryUsingPoolDUnitTest.KEY+i, new Portfolio(i));
@@ -694,7 +694,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
// Test for Event on Region destroy.
server.invoke(new CacheSerializableRunnable("testRegionEvents"){
public void run2()throws CacheException {
- LogWriterSupport.getLogWriter().info("### Destroying the region on the server ###");
+ LogWriterUtils.getLogWriter().info("### Destroying the region on the server ###");
Region region = getCache().getRegion("/root/" + cqDUnitTest.regions[1]);
for (int i = 1; i <=5; i++) {
region.put(CqQueryUsingPoolDUnitTest.KEY+i, new Portfolio(i));
@@ -732,7 +732,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testEventsDuringQueryExecution";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -875,11 +875,11 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
// Start a client
client.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client.getHost()), server1Port), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client.getHost()), server1Port), regionName});
// Start a pub client
client2.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client2.getHost()), server1Port), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client2.getHost()), server1Port), regionName});
//client has thread that invokes new and remove cq over and over
client.invokeAsync(new CacheSerializableRunnable("Register cq") {
@@ -944,7 +944,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
/* Create Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
final String poolName1 = "pool1";
final String poolName2 = "pool2";
@@ -1083,7 +1083,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
/* Create Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
final String poolName1 = "pool1";
@@ -1131,11 +1131,11 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
// Start client 1
client1.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client1.getHost()), server1Port), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client1.getHost()), server1Port), regionName});
// Start client 2
client2.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client2.getHost()), server1Port), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client2.getHost()), server1Port), regionName});
createClient1CqsAndDurableCqs(client1, regionName);
createClient2CqsAndDurableCqs(client2, regionName);
@@ -1193,11 +1193,11 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
// Start client 1
client1.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client1.getHost()), server1Port), regionName, getDurableClientProperties("client1_dc", timeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client1.getHost()), server1Port), regionName, getDurableClientProperties("client1_dc", timeout)});
// Start client 2
client2.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client2.getHost()), server1Port), regionName, getDurableClientProperties("client2_dc", timeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client2.getHost()), server1Port), regionName, getDurableClientProperties("client2_dc", timeout)});
createClient1CqsAndDurableCqs(client1, regionName);
createClient2CqsAndDurableCqs(client2, regionName);
@@ -1258,11 +1258,11 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
// Start client 1
client1.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client1.getHost()), server1Port), regionName, getDurableClientProperties("client1_dc", timeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client1.getHost()), server1Port), regionName, getDurableClientProperties("client1_dc", timeout)});
// Start client 2
client2.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client2.getHost()), server1Port), regionName, getDurableClientProperties("client2_dc", timeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client2.getHost()), server1Port), regionName, getDurableClientProperties("client2_dc", timeout)});
//create the test cqs
createClient1CqsAndDurableCqs(client1, regionName);
@@ -1463,7 +1463,7 @@ public class CqDataUsingPoolDUnitTest extends CacheTestCase {
});
client.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(client.getHost()), serverPort), regionName, getDurableClientProperties(dcName, durableClientTimeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(client.getHost()), serverPort), regionName, getDurableClientProperties(dcName, durableClientTimeout)});
}
[09/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/SecurityTestUtil.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/SecurityTestUtil.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/SecurityTestUtil.java
index 6e07919..6e24398 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/SecurityTestUtil.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/SecurityTestUtil.java
@@ -80,8 +80,8 @@ import com.gemstone.gemfire.internal.logging.PureLogWriter;
import com.gemstone.gemfire.internal.util.Callable;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -212,7 +212,7 @@ public class SecurityTestUtil extends DistributedTestCase {
Integer locatorPort = new Integer(AvailablePort
.getRandomAvailablePort(AvailablePort.SOCKET));
- String addr = NetworkSupport.getIPLiteral();
+ String addr = NetworkUtils.getIPLiteral();
if (locatorString == null) {
locatorString = addr + "[" + locatorPort + ']';
}
@@ -292,14 +292,14 @@ public class SecurityTestUtil extends DistributedTestCase {
authProps.setProperty(DistributionConfig.LOCATORS_NAME, locatorString);
if (locatorPort != null) {
authProps.setProperty(DistributionConfig.START_LOCATOR_NAME,
- NetworkSupport.getIPLiteral() + "[" + locatorPort.toString() + ']');
+ NetworkUtils.getIPLiteral() + "[" + locatorPort.toString() + ']');
}
} else {
- authProps.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ authProps.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
}
authProps.setProperty(DistributionConfig.SECURITY_LOG_LEVEL_NAME, "finest");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Set the server properties to: " + authProps);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Set the java properties to: " + javaProps);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Set the server properties to: " + authProps);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Set the java properties to: " + javaProps);
SecurityTestUtil tmpInstance = new SecurityTestUtil("temp");
try {
@@ -310,7 +310,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (AuthenticationRequiredException ex) {
if (expectedResult.intValue() == AUTHREQ_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when starting peer: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when starting peer: " + ex);
return new Integer(0);
}
else {
@@ -319,7 +319,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (AuthenticationFailedException ex) {
if (expectedResult.intValue() == AUTHFAIL_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when starting peer: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when starting peer: " + ex);
return new Integer(0);
}
else {
@@ -427,7 +427,7 @@ public class SecurityTestUtil extends DistributedTestCase {
//poolFactory.setSubscriptionEnabled(false);
}
pool = ClientServerTestCase.configureConnectionPoolWithNameAndFactory(factory,
- NetworkSupport.getIPLiteral(), portsI, subscriptionEnabled, 0,
+ NetworkUtils.getIPLiteral(), portsI, subscriptionEnabled, 0,
numConnections == null ? -1 : numConnections.intValue(), null, null,
poolFactory);
@@ -436,14 +436,14 @@ public class SecurityTestUtil extends DistributedTestCase {
}
tmpInstance.openCache();
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("multi-user mode " + multiUserAuthMode);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("multi-user mode " + multiUserAuthMode);
proxyCaches[0] = (ProxyCache)((PoolImpl) pool).createAuthenticatedCacheView(authProps);
if (!multiUserAuthMode) {
fail("Expected a UnsupportedOperationException but got none in single-user mode");
}
} catch (UnsupportedOperationException uoe) {
if (!multiUserAuthMode) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected UnsupportedOperationException in single-user mode");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected UnsupportedOperationException in single-user mode");
}
else {
Assert.fail("Got unexpected exception in multi-user mode ", uoe);
@@ -467,7 +467,7 @@ public class SecurityTestUtil extends DistributedTestCase {
catch (AuthenticationRequiredException ex) {
if (expectedResult.intValue() == AUTHREQ_EXCEPTION
|| expectedResult.intValue() == NOFORCE_AUTHREQ_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when starting client: " + ex);
}
else {
@@ -476,7 +476,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (AuthenticationFailedException ex) {
if (expectedResult.intValue() == AUTHFAIL_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when starting client: " + ex);
}
else {
@@ -485,7 +485,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (ServerRefusedConnectionException ex) {
if (expectedResult.intValue() == CONNREFUSED_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when starting client: " + ex);
}
else {
@@ -569,7 +569,7 @@ public class SecurityTestUtil extends DistributedTestCase {
poolFactory.setMultiuserAuthentication(multiUserAuthMode);
poolFactory.setSubscriptionEnabled(true);
pool = ClientServerTestCase.configureConnectionPoolWithNameAndFactory(factory,
- NetworkSupport.getIPLiteral(), portsI, true, 1,
+ NetworkUtils.getIPLiteral(), portsI, true, 1,
numConnections == null ? -1 : numConnections.intValue(), null, null,
poolFactory);
@@ -596,7 +596,7 @@ public class SecurityTestUtil extends DistributedTestCase {
catch (AuthenticationRequiredException ex) {
if (expectedResult.intValue() == AUTHREQ_EXCEPTION
|| expectedResult.intValue() == NOFORCE_AUTHREQ_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when starting client: " + ex);
}
else {
@@ -605,7 +605,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (AuthenticationFailedException ex) {
if (expectedResult.intValue() == AUTHFAIL_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when starting client: " + ex);
}
else {
@@ -614,7 +614,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (ServerRefusedConnectionException ex) {
if (expectedResult.intValue() == CONNREFUSED_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when starting client: " + ex);
}
else {
@@ -669,7 +669,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
authProps.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
authProps.setProperty(DistributionConfig.LOCATORS_NAME,
- NetworkSupport.getIPLiteral() + "[" + port + "]");
+ NetworkUtils.getIPLiteral() + "[" + port + "]");
authProps.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
clearStaticSSLContext();
setJavaProps((Properties)javaProps);
@@ -762,7 +762,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing puts: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing puts: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing puts", ex);
@@ -782,7 +782,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch(NoAvailableServersException ex) {
if(expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing puts: "
+ ex.getCause());
continue;
@@ -794,27 +794,27 @@ public class SecurityTestUtil extends DistributedTestCase {
catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing puts: "
+ ex.getCause());
continue;
}
if ((expectedResult.intValue() == AUTHREQ_EXCEPTION)
&& (ex.getCause() instanceof AuthenticationRequiredException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected AuthenticationRequiredException when doing puts: "
+ ex.getCause());
continue;
}
if ((expectedResult.intValue() == AUTHFAIL_EXCEPTION)
&& (ex.getCause() instanceof AuthenticationFailedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected AuthenticationFailedException when doing puts: "
+ ex.getCause());
continue;
}
else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing puts: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing puts: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing puts", ex);
@@ -822,7 +822,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing puts: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing puts: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing puts", ex);
@@ -845,7 +845,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing getAll: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing getAll: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing getAll", ex);
@@ -874,7 +874,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
} catch (NoAvailableServersException ex) {
if (expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing getAll: "
+ ex.getCause());
} else {
@@ -883,17 +883,17 @@ public class SecurityTestUtil extends DistributedTestCase {
} catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing getAll: "
+ ex.getCause());
} else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing getAll: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing getAll: " + ex);
} else {
Assert.fail("Got unexpected exception when doing getAll", ex);
}
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing getAll: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing getAll: " + ex);
} else {
Assert.fail("Got unexpected exception when doing getAll", ex);
}
@@ -921,7 +921,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing gets: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing gets: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing gets", ex);
@@ -942,7 +942,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch(NoAvailableServersException ex) {
if(expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing gets: "
+ ex.getCause());
continue;
@@ -954,13 +954,13 @@ public class SecurityTestUtil extends DistributedTestCase {
catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing gets: "
+ ex.getCause());
continue;
}
else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing gets: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing gets: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing gets", ex);
@@ -968,7 +968,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing gets: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing gets: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing gets", ex);
@@ -1022,7 +1022,7 @@ public class SecurityTestUtil extends DistributedTestCase {
assertNotNull(region);
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when doing region destroy: " + ex);
} else {
Assert.fail("Got unexpected exception when doing region destroy", ex);
@@ -1042,7 +1042,7 @@ public class SecurityTestUtil extends DistributedTestCase {
assertNull(region);
} catch (NoAvailableServersException ex) {
if (expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing region destroy: "
+ ex.getCause());
} else {
@@ -1051,18 +1051,18 @@ public class SecurityTestUtil extends DistributedTestCase {
} catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing region destroy: "
+ ex.getCause());
} else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when doing region destroy: " + ex);
} else {
Assert.fail("Got unexpected exception when doing region destroy", ex);
}
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when doing region destroy: " + ex);
} else {
Assert.fail("Got unexpected exception when doing region destroy", ex);
@@ -1086,7 +1086,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing destroys: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing destroys: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing destroys", ex);
@@ -1101,7 +1101,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch(NoAvailableServersException ex) {
if(expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing destroys: "
+ ex.getCause());
continue;
@@ -1113,13 +1113,13 @@ public class SecurityTestUtil extends DistributedTestCase {
catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing destroys: "
+ ex.getCause());
continue;
}
else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing destroys: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing destroys: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing destroys", ex);
@@ -1127,7 +1127,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing destroys: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing destroys: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing destroys", ex);
@@ -1152,7 +1152,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing invalidates: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing invalidates: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing invalidates", ex);
@@ -1167,7 +1167,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch(NoAvailableServersException ex) {
if(expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing invalidates: "
+ ex.getCause());
continue;
@@ -1179,13 +1179,13 @@ public class SecurityTestUtil extends DistributedTestCase {
catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing invalidates: "
+ ex.getCause());
continue;
}
else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing invalidates: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing invalidates: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing invalidates", ex);
@@ -1193,7 +1193,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing invalidates: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing invalidates: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing invalidates", ex);
@@ -1218,7 +1218,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing containsKey: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing containsKey: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing containsKey", ex);
@@ -1234,7 +1234,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch(NoAvailableServersException ex) {
if(expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing containsKey: "
+ ex.getCause());
continue;
@@ -1246,13 +1246,13 @@ public class SecurityTestUtil extends DistributedTestCase {
catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing containsKey: "
+ ex.getCause());
continue;
}
else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing containsKey: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing containsKey: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing containsKey", ex);
@@ -1260,7 +1260,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing containsKey: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing containsKey: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing containsKey", ex);
@@ -1282,7 +1282,7 @@ public class SecurityTestUtil extends DistributedTestCase {
assertNotNull(region);
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing queries: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing queries: " + ex);
} else {
Assert.fail("Got unexpected exception when doing queries", ex);
}
@@ -1297,7 +1297,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
} catch (NoAvailableServersException ex) {
if (expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing queries: "
+ ex.getCause());
} else {
@@ -1306,28 +1306,28 @@ public class SecurityTestUtil extends DistributedTestCase {
} catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing queries: "
+ ex.getCause());
} else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing queries: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing queries: " + ex);
} else {
Assert.fail("Got unexpected exception when doing queries", ex);
}
} catch (QueryInvocationTargetException qite) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (qite.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing queries: "
+ qite.getCause());
} else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing queries: " + qite);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing queries: " + qite);
} else {
Assert.fail("Got unexpected exception when doing queries", qite);
}
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Got expected exception when doing queries: " + ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Got expected exception when doing queries: " + ex);
} else {
Assert.fail("Got unexpected exception when doing queries", ex);
}
@@ -1347,7 +1347,7 @@ public class SecurityTestUtil extends DistributedTestCase {
assertNotNull(region);
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when executing function: " + ex);
} else {
Assert.fail("Got unexpected exception when executing function", ex);
@@ -1377,7 +1377,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
} catch (NoAvailableServersException ex) {
if (expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when executing function: "
+ ex.getCause());
} else {
@@ -1386,11 +1386,11 @@ public class SecurityTestUtil extends DistributedTestCase {
} catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when executing function: "
+ ex.getCause());
} else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when executing function: " + ex);
} else {
Assert.fail("Got unexpected exception when executing function", ex);
@@ -1400,18 +1400,18 @@ public class SecurityTestUtil extends DistributedTestCase {
&& ((ex.getCause() instanceof NotAuthorizedException) || ((ex
.getCause() instanceof ServerOperationException) && (((ServerOperationException)ex
.getCause()).getCause() instanceof NotAuthorizedException)))) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when executing function: "
+ ex.getCause());
} else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when executing function: " + ex);
} else {
Assert.fail("Got unexpected exception when executing function", ex);
}
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when executing function: " + ex);
} else {
Assert.fail("Got unexpected exception when executing function", ex);
@@ -1431,7 +1431,7 @@ public class SecurityTestUtil extends DistributedTestCase {
assertNotNull(region);
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when executing query: " + ex);
} else {
Assert.fail("Got unexpected exception when executing query", ex);
@@ -1453,7 +1453,7 @@ public class SecurityTestUtil extends DistributedTestCase {
assertEquals(expectedValue.intValue(), result.asList().size());
} catch (NoAvailableServersException ex) {
if (expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when executing query: "
+ ex.getCause());
} else {
@@ -1462,18 +1462,18 @@ public class SecurityTestUtil extends DistributedTestCase {
} catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when executing query: "
+ ex.getCause());
} else if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when executing query: " + ex);
} else {
Assert.fail("Got unexpected exception when executing query", ex);
}
} catch (Exception ex) {
if (expectedResult.intValue() == OTHER_EXCEPTION) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Got expected exception when executing query: " + ex);
} else {
Assert.fail("Got unexpected exception when executing query", ex);
@@ -1498,7 +1498,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but expected results " + expectedResults.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("PUT: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("PUT: MultiUser# " + i);
doPutsP(num, Integer.valueOf(i), expectedResults[i], false);
}
}
@@ -1524,7 +1524,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but expected results " + expectedResults.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"GET_ALL" + (useTX ? " in TX" : "") + ": MultiUser# " + i);
doGetAllP(Integer.valueOf(i), expectedResults[i], useTX);
}
@@ -1537,7 +1537,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but expected results " + expectedResults.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("GET: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("GET: MultiUser# " + i);
doGetsP(num, Integer.valueOf(i), expectedResults[i], false);
}
}
@@ -1549,7 +1549,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but expected results " + expectedResults.length);
}
for (int i = numOfUsers-1; i >= 0; i--) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("DESTROY: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("DESTROY: MultiUser# " + i);
doRegionDestroysP(Integer.valueOf(i), expectedResults[i]);
}
}
@@ -1561,7 +1561,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but expected results " + expectedResults.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("DESTROY: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("DESTROY: MultiUser# " + i);
doDestroysP(num, Integer.valueOf(i), expectedResults[i], false);
}
}
@@ -1573,7 +1573,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but expected results " + expectedResults.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("INVALIDATE: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("INVALIDATE: MultiUser# " + i);
doInvalidatesP(num, Integer.valueOf(i), expectedResults[i], false);
}
}
@@ -1589,7 +1589,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but #expected output " + results.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("CONTAINS_KEY: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("CONTAINS_KEY: MultiUser# " + i);
doContainsKeysP(num, Integer.valueOf(i), expectedResults[i], false, results[i]);
}
}
@@ -1601,7 +1601,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but #expected results " + expectedResults.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("QUERY: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("QUERY: MultiUser# " + i);
doQueriesP(Integer.valueOf(i), expectedResults[i], valueSize);
}
}
@@ -1617,16 +1617,16 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but #expected output " + results.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("FunctionExecute:onRegion MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("FunctionExecute:onRegion MultiUser# " + i);
doFunctionExecuteP(Integer.valueOf(i), function, expectedResults[i], results[i], "region");
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("FunctionExecute:onServer MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("FunctionExecute:onServer MultiUser# " + i);
doFunctionExecuteP(Integer.valueOf(i), function, expectedResults[i], results[i], "server");
}
if (!isFailoverCase) {
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("FunctionExecute:onServers MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("FunctionExecute:onServers MultiUser# " + i);
doFunctionExecuteP(Integer.valueOf(i), function, expectedResults[i],
results[i], "servers");
}
@@ -1640,7 +1640,7 @@ public class SecurityTestUtil extends DistributedTestCase {
+ ", but #expected results " + expectedResults.length);
}
for (int i = 0; i < numOfUsers; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("QueryExecute: MultiUser# " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("QueryExecute: MultiUser# " + i);
doQueryExecuteP(Integer.valueOf(i), expectedResults[i], result);
}
}
@@ -1687,7 +1687,7 @@ public class SecurityTestUtil extends DistributedTestCase {
fail("Expected " + expectedResult + " but found "
+ e.getClass().getSimpleName() + " in doSimpleGet()");
} else {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().fine(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().fine(
"Got expected " + e.getClass().getSimpleName()
+ " in doSimpleGet()");
}
@@ -1707,7 +1707,7 @@ public class SecurityTestUtil extends DistributedTestCase {
Assert.fail("Expected " + expectedResult + " but found "
+ e.getClass().getSimpleName() + " in doSimplePut()", e);
} else {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().fine(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().fine(
"Got expected " + e.getClass().getSimpleName()
+ " in doSimplePut()");
}
@@ -1735,11 +1735,11 @@ public class SecurityTestUtil extends DistributedTestCase {
}
}
catch (IllegalAccessException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning("Exception while clearing static SSL field.", ex);
}
catch (ClassCastException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning("Exception while clearing static SSL field.", ex);
}
}
@@ -1759,7 +1759,7 @@ public class SecurityTestUtil extends DistributedTestCase {
assertNull(field.get(obj));
}
catch (IllegalAccessException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().warning("Exception while clearing SSL fields.", ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().warning("Exception while clearing SSL fields.", ex);
}
}
}
@@ -1784,7 +1784,7 @@ public class SecurityTestUtil extends DistributedTestCase {
}
}
catch (IllegalAccessException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().warning("Exception while getting SSL fields.", ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().warning("Exception while getting SSL fields.", ex);
}
}
return resultFields;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Assert.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Assert.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Assert.java
index 10c62da..5d927eb 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Assert.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Assert.java
@@ -17,17 +17,45 @@
package com.gemstone.gemfire.test.dunit;
/**
- * Extracted from DistributedTestCase
+ * Extends <code>org.junit.Assert</code> with additional assertion and fail
+ * methods.
+ *
+ * These methods can be used directly: <code>Assert.assertEquals(...)</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.Assert.*;
+ * ...
+ * fail(...);
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ *
+ * @see java.lang.AssertionError
*/
public class Assert extends org.junit.Assert {
protected Assert() {
}
+ /**
+ * Fails a test by throwing a new {@code AssertionError} with the specified
+ * detail message and cause.
+ *
+ * <p>Note that the detail message associated with
+ * {@code cause} is <i>not</i> automatically incorporated in
+ * this error's detail message.
+ *
+ * @param message the detail message, may be {@code null}
+ * @param cause the cause, may be {@code null}
+ *
+ * @see java.lang.AssertionError
+ */
public static void fail(final String message, final Throwable cause) {
if (message == null && cause == null) {
throw new AssertionError();
- } if (message == null) {
+ }
+ if (message == null) {
throw new AssertionError(cause);
}
if (cause == null) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/AsyncInvocation.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/AsyncInvocation.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/AsyncInvocation.java
index e589863..544638e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/AsyncInvocation.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/AsyncInvocation.java
@@ -158,20 +158,22 @@ public class AsyncInvocation<T> extends Thread {
////////////////////// Inner Classes //////////////////////
/**
- * A <code>ThreadGroup</code> that notices when an exception occurrs
+ * A <code>ThreadGroup</code> that notices when an exception occurs
* during an <code>AsyncInvocation</code>.
+ *
+ * TODO: reimplement using Futures
*/
private static class AsyncInvocationGroup extends ThreadGroup {
AsyncInvocationGroup() {
super("Async Invocations");
}
- public void uncaughtException(Thread t, Throwable e) {
- if (e instanceof VirtualMachineError) {
- SystemFailure.setFailure((VirtualMachineError)e); // don't throw
+ public void uncaughtException(Thread thread, Throwable throwable) {
+ if (throwable instanceof VirtualMachineError) {
+ SystemFailure.setFailure((VirtualMachineError)throwable); // don't throw
}
- if (t instanceof AsyncInvocation) {
- ((AsyncInvocation) t).exception = e;
+ if (thread instanceof AsyncInvocation) {
+ ((AsyncInvocation) thread).exception = throwable;
}
}
}
@@ -202,8 +204,7 @@ public class AsyncInvocation<T> extends Thread {
return this.returnedObj;
}
- public void run()
- {
+ public void run() {
super.run();
this.returnedObj = (T) returnValue.get();
returnValue.set(null);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DUnitEnv.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DUnitEnv.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DUnitEnv.java
index eea2d65..d662779 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DUnitEnv.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DUnitEnv.java
@@ -35,7 +35,6 @@ import com.gemstone.gemfire.test.dunit.standalone.BounceResult;
* and run them on a different VM launching system.
*
* @author dsmith
- *
*/
public abstract class DUnitEnv {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerSupport.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerSupport.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerSupport.java
deleted file mode 100755
index 9de33e5..0000000
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerSupport.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.gemstone.gemfire.test.dunit;
-
-public class DebuggerSupport {
-
- protected DebuggerSupport() {
- }
-
- public static void attachDebugger(VM vm, final String msg) {
- vm.invoke(new SerializableRunnable("Attach Debugger") {
- public void run() {
- com.gemstone.gemfire.internal.util.DebuggerSupport.
- waitForJavaDebugger(msg);
- }
- });
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerUtils.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerUtils.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerUtils.java
new file mode 100755
index 0000000..534eab2
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DebuggerUtils.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.test.dunit;
+
+import com.gemstone.gemfire.internal.util.DebuggerSupport;
+
+/**
+ * <code>DebuggerUtils</code> provides static utility methods that facilitate
+ * runtime debugging.
+ *
+ * These methods can be used directly: <code>DebuggerUtils.attachDebugger(...)</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.DebuggerUtils.*;
+ * ...
+ * attachDebugger(...);
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ *
+ * @see com.gemstone.gemfire.internal.util.DebuggerSupport
+ */
+public class DebuggerUtils {
+
+ protected DebuggerUtils() {
+ }
+
+ @SuppressWarnings("serial")
+ public static void attachDebugger(final VM vm, final String message) {
+ vm.invoke(new SerializableRunnable(DebuggerSupport.class.getSimpleName()+" waitForJavaDebugger") {
+ public void run() {
+ DebuggerSupport.waitForJavaDebugger(message);
+ }
+ });
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedSystemSupport.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedSystemSupport.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedSystemSupport.java
deleted file mode 100755
index 4356646..0000000
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedSystemSupport.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.gemstone.gemfire.test.dunit;
-
-import java.io.File;
-
-import com.gemstone.gemfire.distributed.DistributedSystem;
-import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
-import com.gemstone.gemfire.distributed.internal.membership.gms.MembershipManagerHelper;
-
-public class DistributedSystemSupport {
-
- /**
- * Crash the cache in the given VM in such a way that it immediately stops communicating with
- * peers. This forces the VM's membership manager to throw a ForcedDisconnectException by
- * forcibly terminating the JGroups protocol stack with a fake EXIT event.<p>
- *
- * NOTE: if you use this method be sure that you clean up the VM before the end of your
- * test with disconnectFromDS() or disconnectAllFromDS().
- */
- public static void crashDistributedSystem(final DistributedSystem msys) {
- MembershipManagerHelper.crashDistributedSystem(msys);
- MembershipManagerHelper.inhibitForcedDisconnectLogging(false);
- WaitCriterion wc = new WaitCriterion() {
- public boolean done() {
- return !msys.isConnected();
- }
- public String description() {
- return "waiting for distributed system to finish disconnecting: " + msys;
- }
- };
- Wait.waitForCriterion(wc, 10000, 1000, true);
- }
-
- /**
- * Crash the cache in the given VM in such a way that it immediately stops communicating with
- * peers. This forces the VM's membership manager to throw a ForcedDisconnectException by
- * forcibly terminating the JGroups protocol stack with a fake EXIT event.<p>
- *
- * NOTE: if you use this method be sure that you clean up the VM before the end of your
- * test with disconnectFromDS() or disconnectAllFromDS().
- */
- public static boolean crashDistributedSystem(VM vm) {
- return (Boolean)vm.invoke(new SerializableCallable("crash distributed system") {
- public Object call() throws Exception {
- DistributedSystem msys = InternalDistributedSystem.getAnyInstance();
- crashDistributedSystem(msys);
- return true;
- }
- });
- }
-
- /**
- * delete locator state files. Use this after getting a random port
- * to ensure that an old locator state file isn't picked up by the
- * new locator you're starting.
- * @param ports
- */
- public static void deleteLocatorStateFile(int... ports) {
- for (int i=0; i<ports.length; i++) {
- File stateFile = new File("locator"+ports[i]+"view.dat");
- if (stateFile.exists()) {
- stateFile.delete();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestCase.java
index 8d6e63f..15dd670 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestCase.java
@@ -75,6 +75,7 @@ import junit.framework.TestCase;
@Category(DistributedTest.class)
@SuppressWarnings("serial")
public abstract class DistributedTestCase extends TestCase implements java.io.Serializable {
+
private static final Logger logger = LogService.getLogger();
private static final Set<String> testHistory = new LinkedHashSet<String>();
@@ -129,7 +130,7 @@ public abstract class DistributedTestCase extends TestCase implements java.io.Se
}
if (system == null || !system.isConnected()) {
// Figure out our distributed system properties
- Properties p = DistributedTestSupport.getAllDistributedSystemProperties(props);
+ Properties p = DistributedTestUtils.getAllDistributedSystemProperties(props);
lastSystemCreatedInTest = getClass(); // used to be getDeclaringClass()
if (logPerTest) {
String testMethod = getTestMethodName();
@@ -146,10 +147,10 @@ public abstract class DistributedTestCase extends TestCase implements java.io.Se
} else {
boolean needNewSystem = false;
if(!getClass().equals(lastSystemCreatedInTest)) { // used to be getDeclaringClass()
- Properties newProps = DistributedTestSupport.getAllDistributedSystemProperties(props);
+ Properties newProps = DistributedTestUtils.getAllDistributedSystemProperties(props);
needNewSystem = !newProps.equals(lastSystemProperties);
if(needNewSystem) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Test class has changed and the new DS properties are not an exact match. "
+ "Forcing DS disconnect. Old props = "
+ lastSystemProperties + "new props=" + newProps);
@@ -163,7 +164,7 @@ public abstract class DistributedTestCase extends TestCase implements java.io.Se
String value = (String) entry.getValue();
if (!value.equals(activeProps.getProperty(key))) {
needNewSystem = true;
- LogWriterSupport.getLogWriter().info("Forcing DS disconnect. For property " + key
+ LogWriterUtils.getLogWriter().info("Forcing DS disconnect. For property " + key
+ " old value = " + activeProps.getProperty(key)
+ " new value = " + value);
break;
@@ -173,7 +174,7 @@ public abstract class DistributedTestCase extends TestCase implements java.io.Se
if(needNewSystem) {
// the current system does not meet our needs to disconnect and
// call recursively to get a new system.
- LogWriterSupport.getLogWriter().info("Disconnecting from current DS in order to make a new one");
+ LogWriterUtils.getLogWriter().info("Disconnecting from current DS in order to make a new one");
disconnectFromDS();
getSystem(props);
}
@@ -458,7 +459,7 @@ public abstract class DistributedTestCase extends TestCase implements java.io.Se
Invoke.invokeInEveryVM(()->tearDownVM());
Invoke.invokeInLocator(()->{
DistributionMessageObserver.setInstance(null);
- DistributedTestSupport.unregisterInstantiatorsInThisVM();
+ DistributedTestUtils.unregisterInstantiatorsInThisVM();
});
DUnitLauncher.closeAndCheckForSuspects();
}
@@ -473,7 +474,7 @@ public abstract class DistributedTestCase extends TestCase implements java.io.Se
ClientServerTestCase.AUTO_LOAD_BALANCE = false;
ClientStatsManager.cleanupForTests();
DiskStoreObserver.setInstance(null);
- DistributedTestSupport.unregisterInstantiatorsInThisVM();
+ DistributedTestUtils.unregisterInstantiatorsInThisVM();
DistributionMessageObserver.setInstance(null);
GlobalLockingDUnitTest.region_testBug32356 = null;
InitialImageOperation.slowImageProcessing = 0;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestSupport.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestSupport.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestSupport.java
deleted file mode 100755
index 4e8546f..0000000
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestSupport.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.gemstone.gemfire.test.dunit;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Properties;
-
-import com.gemstone.gemfire.distributed.internal.DistributionConfig;
-import com.gemstone.gemfire.internal.InternalDataSerializer;
-import com.gemstone.gemfire.internal.InternalInstantiator;
-import com.gemstone.gemfire.internal.cache.tier.sockets.DataSerializerPropogationDUnitTest;
-
-public class DistributedTestSupport {
-
- protected DistributedTestSupport() {
- }
-
- /**
- * Fetches the GemFireDescription for this test and adds its
- * DistributedSystem properties to the provided props parameter.
- *
- * @param config the properties to add hydra's test properties to
- */
- public static void addHydraProperties(Properties config) {
- Properties p = DUnitEnv.get().getDistributedSystemProperties();
- for (Iterator iter = p.entrySet().iterator();
- iter.hasNext(); ) {
- Map.Entry entry = (Map.Entry) iter.next();
- String key = (String) entry.getKey();
- String value = (String) entry.getValue();
- if (config.getProperty(key) == null) {
- config.setProperty(key, value);
- }
- }
- }
-
- public final static Properties getAllDistributedSystemProperties(Properties props) {
- Properties p = DUnitEnv.get().getDistributedSystemProperties();
-
- // our tests do not expect auto-reconnect to be on by default
- if (!p.contains(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME)) {
- p.put(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME, "true");
- }
-
- for (Iterator iter = props.entrySet().iterator();
- iter.hasNext(); ) {
- Map.Entry entry = (Map.Entry) iter.next();
- String key = (String) entry.getKey();
- Object value = entry.getValue();
- p.put(key, value);
- }
- return p;
- }
-
- /**
- * Get the port that the standard dunit locator is listening on.
- */
- public static int getDUnitLocatorPort() {
- return DUnitEnv.get().getLocatorPort();
- }
-
- public static void unregisterAllDataSerializersFromAllVms() {
- DistributedTestSupport.unregisterDataSerializerInThisVM();
- Invoke.invokeInEveryVM(new SerializableRunnable() {
- public void run() {
- DistributedTestSupport.unregisterDataSerializerInThisVM();
- }
- });
- Invoke.invokeInLocator(new SerializableRunnable() {
- public void run() {
- DistributedTestSupport.unregisterDataSerializerInThisVM();
- }
- });
- }
-
- public static void unregisterDataSerializerInThisVM() {
- DataSerializerPropogationDUnitTest.successfullyLoadedTestDataSerializer = false;
- // unregister all the Dataserializers
- InternalDataSerializer.reinitialize();
- // ensure that all are unregistered
- assertEquals(0, InternalDataSerializer.getSerializers().length);
- }
-
- public static void unregisterInstantiatorsInThisVM() {
- // unregister all the instantiators
- InternalInstantiator.reinitialize();
- assertEquals(0, InternalInstantiator.getInstantiators().length);
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestUtils.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestUtils.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestUtils.java
new file mode 100755
index 0000000..39c8ab3
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/DistributedTestUtils.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.test.dunit;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import com.gemstone.gemfire.distributed.DistributedSystem;
+import com.gemstone.gemfire.distributed.internal.DistributionConfig;
+import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
+import com.gemstone.gemfire.distributed.internal.membership.gms.MembershipManagerHelper;
+import com.gemstone.gemfire.internal.InternalDataSerializer;
+import com.gemstone.gemfire.internal.InternalInstantiator;
+
+/**
+ * <code>DistributedTestUtils</code> provides static utility methods that
+ * affect the runtime environment or artifacts generated by a DistributedTest.
+ *
+ * These methods can be used directly: <code>DistributedTestUtils.crashDistributedSystem(...)</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.DistributedTestUtils.*;
+ * ...
+ * crashDistributedSystem(...);
+ * </pre>
+ *
+ * Extracted from DistributedTestCase.
+ */
+public class DistributedTestUtils {
+
+ protected DistributedTestUtils() {
+ }
+
+ /**
+ * Fetches the GemFireDescription for this test and adds its
+ * DistributedSystem properties to the provided props parameter.
+ *
+ * @param properties the properties to add hydra's test properties to
+ */
+ public static void addHydraProperties(final Properties properties) {
+ Properties dsProperties = DUnitEnv.get().getDistributedSystemProperties();
+ for (Iterator<Map.Entry<Object, Object>> iter = dsProperties.entrySet().iterator(); iter.hasNext();) {
+ Map.Entry<Object, Object> entry = iter.next();
+ String key = (String) entry.getKey();
+ String value = (String) entry.getValue();
+ if (properties.getProperty(key) == null) {
+ properties.setProperty(key, value);
+ }
+ }
+ }
+
+ /**
+ * Crash the cache in the given VM in such a way that it immediately stops communicating with
+ * peers. This forces the VM's membership manager to throw a ForcedDisconnectException by
+ * forcibly terminating the JGroups protocol stack with a fake EXIT event.<p>
+ *
+ * NOTE: if you use this method be sure that you clean up the VM before the end of your
+ * test with disconnectFromDS() or disconnectAllFromDS().
+ */
+ public static void crashDistributedSystem(final DistributedSystem system) {
+ MembershipManagerHelper.crashDistributedSystem(system);
+ MembershipManagerHelper.inhibitForcedDisconnectLogging(false);
+ WaitCriterion wc = new WaitCriterion() {
+ public boolean done() {
+ return !system.isConnected();
+ }
+ public String description() {
+ return "Waiting for distributed system to finish disconnecting: " + system;
+ }
+ };
+ Wait.waitForCriterion(wc, 10000, 1000, true);
+ }
+
+ /**
+ * Crash the cache in the given VM in such a way that it immediately stops communicating with
+ * peers. This forces the VM's membership manager to throw a ForcedDisconnectException by
+ * forcibly terminating the JGroups protocol stack with a fake EXIT event.<p>
+ *
+ * NOTE: if you use this method be sure that you clean up the VM before the end of your
+ * test with disconnectFromDS() or disconnectAllFromDS().
+ */
+ @SuppressWarnings("serial")
+ public static boolean crashDistributedSystem(final VM vm) {
+ return vm.invoke(new SerializableCallable<Boolean>(DistributedTestUtils.class.getSimpleName() + " crashDistributedSystem") {
+ public Boolean call() throws Exception {
+ DistributedSystem system = InternalDistributedSystem.getAnyInstance();
+ crashDistributedSystem(system);
+ return true;
+ }
+ });
+ }
+
+ /**
+ * Delete locator state files. Use this after getting a random port
+ * to ensure that an old locator state file isn't picked up by the
+ * new locator you're starting.
+ */
+ public static void deleteLocatorStateFile(final int... ports) {
+ for (int index = 0; index < ports.length; index++) {
+ File stateFile = new File("locator"+ports[index]+"view.dat");
+ if (stateFile.exists()) {
+ stateFile.delete();
+ }
+ }
+ }
+
+ public final static Properties getAllDistributedSystemProperties(final Properties properties) {
+ Properties dsProperties = DUnitEnv.get().getDistributedSystemProperties();
+
+ // our tests do not expect auto-reconnect to be on by default
+ if (!dsProperties.contains(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME)) {
+ dsProperties.put(DistributionConfig.DISABLE_AUTO_RECONNECT_NAME, "true");
+ }
+
+ for (Iterator<Map.Entry<Object,Object>> iterator = properties.entrySet().iterator(); iterator.hasNext();) {
+ Map.Entry<Object,Object> entry = iterator.next();
+ String key = (String) entry.getKey();
+ Object value = entry.getValue();
+ dsProperties.put(key, value);
+ }
+ return dsProperties;
+ }
+
+ /**
+ * Get the port that the standard dunit locator is listening on.
+ */
+ public static int getDUnitLocatorPort() {
+ return DUnitEnv.get().getLocatorPort();
+ }
+
+ public static void unregisterAllDataSerializersFromAllVms() {
+ DistributedTestUtils.unregisterDataSerializerInThisVM();
+ Invoke.invokeInEveryVM(()->unregisterDataSerializerInThisVM());
+ Invoke.invokeInLocator(()->unregisterDataSerializerInThisVM());
+ }
+
+ public static void unregisterDataSerializerInThisVM() {
+ // TODO:KIRK: delete DataSerializerPropogationDUnitTest.successfullyLoadedTestDataSerializer = false;
+ // unregister all the Dataserializers
+ InternalDataSerializer.reinitialize();
+ // ensure that all are unregistered
+ assertEquals(0, InternalDataSerializer.getSerializers().length);
+ }
+
+ public static void unregisterInstantiatorsInThisVM() {
+ // unregister all the instantiators
+ InternalInstantiator.reinitialize();
+ assertEquals(0, InternalInstantiator.getInstantiators().length);
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Host.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Host.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Host.java
index 4ec6165..95d6f0d 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Host.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/Host.java
@@ -16,6 +16,7 @@
*/
package com.gemstone.gemfire.test.dunit;
+import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -33,9 +34,9 @@ import com.gemstone.gemfire.test.dunit.standalone.RemoteDUnitVMIF;
* started on other hosts via additional Hydra configuration.</P>
*
* @author David Whitlock
- *
*/
-public abstract class Host implements java.io.Serializable {
+@SuppressWarnings("serial")
+public abstract class Host implements Serializable {
/** The available hosts */
protected static List hosts = new ArrayList();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/IgnoredException.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/IgnoredException.java b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/IgnoredException.java
index 175543a..d0cead6 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/IgnoredException.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/test/dunit/IgnoredException.java
@@ -19,41 +19,70 @@ package com.gemstone.gemfire.test.dunit;
import java.io.Serializable;
import java.util.concurrent.ConcurrentLinkedQueue;
-import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.logging.LogService;
/**
- * A class that represents an currently logged expected exception, which
- * should be removed
+ * <code>IgnoredException</code> provides static utility methods that
+ * will log messages to add or remove <code>IgnoredException</code>s.
+ * Each <code>IgnoredException</code> allows you to specify a suspect string
+ * that will be ignored by the <code>GrepLogs</code> utility which is run
+ * after each <code>DistributedTest</code> test method.
+ *
+ * These methods can be used directly:
+ * <code>IgnoredException.addIgnoredException(...)</code>,
+ * however, they are intended to be referenced through static import:
+ *
+ * <pre>
+ * import static com.gemstone.gemfire.test.dunit.IgnoredException.*;
+ * ...
+ * addIgnoredException(...);
+ * </pre>
+ *
+ * A test should use <code>addIgnoredException(...)</code> before executing
+ * the code that will potentially log the suspect string. The test should
+ * then <code>remove()</code> the <code>IgnoredException</code> immediately
+ * after. Note that <code>DistributedTestCase.tearDown()</code> will
+ * automatically remove all current <code>IgnoredException</code>s by
+ * invoking <code>removeAllIgnoredExceptions</code>.
+ *
+ * A suspect string is typically an Exception class and/or message string.
+ *
+ * The <code>GrepLogs</code> utility is part of Hydra which is not included
+ * in Apache Geode. The Hydra class which consumes logs and reports suspect
+ * strings is <code>batterytest.greplogs.GrepLogs</code>.
+ *
+ * Extracted from DistributedTestCase.
*
* @author Mitch Thomas
* @since 5.7bugfix
*/
+@SuppressWarnings("serial")
public class IgnoredException implements Serializable {
+
private static final Logger logger = LogService.getLogger();
- private static final long serialVersionUID = 1L;
- private final String errorString;
+ private final String suspectString;
private final transient VM vm;
private static ConcurrentLinkedQueue<IgnoredException> ignoredExceptions = new ConcurrentLinkedQueue<IgnoredException>();
- public IgnoredException(final String errorString) {
- this.errorString = errorString;
+ public IgnoredException(final String suspectString) {
+ this.suspectString = suspectString;
this.vm = null;
}
- IgnoredException(final String errorString, final VM vm) {
- this.errorString = errorString;
+ IgnoredException(final String suspectString, final VM vm) {
+ this.suspectString = suspectString;
this.vm = vm;
}
- String errorString() {
- return this.errorString;
+ String suspectString() {
+ return this.suspectString;
}
VM vm() {
@@ -61,130 +90,111 @@ public class IgnoredException implements Serializable {
}
public String getRemoveMessage() {
- return "<ExpectedException action=remove>" + errorString + "</ExpectedException>";
+ return "<ExpectedException action=remove>" + this.suspectString + "</ExpectedException>";
}
public String getAddMessage() {
- return "<ExpectedException action=add>" + errorString + "</ExpectedException>";
+ return "<ExpectedException action=add>" + this.suspectString + "</ExpectedException>";
}
public void remove() {
- SerializableRunnable removeRunnable = new SerializableRunnable(
- "removeExpectedExceptions") {
+ final String removeMessage = getRemoveMessage();
+
+ @SuppressWarnings("serial")
+ SerializableRunnable removeRunnable = new SerializableRunnable(IgnoredException.class.getSimpleName()+" remove") {
public void run() {
- final String remove = getRemoveMessage();
- final InternalDistributedSystem sys = InternalDistributedSystem
- .getConnectedInstance();
- if (sys != null) {
- sys.getLogWriter().info(remove);
+ // TODO: delete use of system.getLogWriter
+ DistributedSystem system = InternalDistributedSystem.getConnectedInstance();
+ if (system != null) {
+ system.getLogWriter().info(removeMessage);
}
+
+ // TODO: delete use of LogWriterUtils
try {
- LogWriterSupport.getLogWriter().info(remove);
+ LogWriterUtils.getLogWriter().info(removeMessage);
} catch (Exception noHydraLogger) {
}
- logger.info(remove);
+ logger.info(removeMessage);
}
};
+ removeRunnable.run();
+
if (this.vm != null) {
vm.invoke(removeRunnable);
- }
- else {
+ } else {
Invoke.invokeInEveryVM(removeRunnable);
}
- String s = getRemoveMessage();
- LogManager.getLogger(LogService.BASE_LOGGER_NAME).info(s);
- // log it locally
- final InternalDistributedSystem sys = InternalDistributedSystem
- .getConnectedInstance();
- if (sys != null) { // avoid creating a system
- sys.getLogWriter().info(s);
- }
- LogWriterSupport.getLogWriter().info(s);
}
public static void removeAllExpectedExceptions() {
- IgnoredException ex;
- while((ex = ignoredExceptions.poll()) != null) {
- ex.remove();
+ IgnoredException ignoredException;
+ while ((ignoredException = ignoredExceptions.poll()) != null) {
+ ignoredException.remove();
}
}
/**
* Log in all VMs, in both the test logger and the GemFire logger the
- * expected exception string to prevent grep logs from complaining. The
- * expected string is used by the GrepLogs utility and so can contain
+ * ignored exception string to prevent grep logs from complaining. The
+ * suspect string is used by the GrepLogs utility and so can contain
* regular expression characters.
*
* @since 5.7bugfix
- * @param exception
- * the exception string to expect
- * @param v
- * the VM on which to log the expected exception or null for all VMs
- * @return an ExpectedException instance for removal purposes
+ * @param suspectString the exception string to expect
+ * @param vm the VM on which to log the expected exception or null for all VMs
+ * @return an IgnoredException instance for removal purposes
*/
- public static IgnoredException addIgnoredException(final String exception,
- VM v) {
- final IgnoredException ret;
- if (v != null) {
- ret = new IgnoredException(exception, v);
- }
- else {
- ret = new IgnoredException(exception);
- }
- // define the add and remove expected exceptions
- final String add = ret.getAddMessage();
- SerializableRunnable addRunnable = new SerializableRunnable(
- "addExpectedExceptions") {
+ public static IgnoredException addIgnoredException(final String suspectString, final VM vm) {
+ final IgnoredException ignoredException = new IgnoredException(suspectString, vm);
+ final String addMessage = ignoredException.getAddMessage();
+
+ @SuppressWarnings("serial")
+ SerializableRunnable addRunnable = new SerializableRunnable(IgnoredException.class.getSimpleName()+" addIgnoredException") {
public void run() {
- final InternalDistributedSystem sys = InternalDistributedSystem
- .getConnectedInstance();
- if (sys != null) {
- sys.getLogWriter().info(add);
+ // TODO: delete use of system.getLogWriter
+ DistributedSystem system = InternalDistributedSystem.getConnectedInstance();
+ if (system != null) {
+ system.getLogWriter().info(addMessage);
}
+
+ // TODO: delete use of LogWriterUtils
try {
- LogWriterSupport.getLogWriter().info(add);
+ LogWriterUtils.getLogWriter().info(addMessage);
} catch (Exception noHydraLogger) {
}
- logger.info(add);
+ logger.info(addMessage);
}
};
- if (v != null) {
- v.invoke(addRunnable);
- }
- else {
+
+ addRunnable.run();
+
+ if (vm != null) {
+ vm.invoke(addRunnable);
+ } else {
Invoke.invokeInEveryVM(addRunnable);
}
- LogManager.getLogger(LogService.BASE_LOGGER_NAME).info(add);
- // Log it locally too
- final InternalDistributedSystem sys = InternalDistributedSystem
- .getConnectedInstance();
- if (sys != null) { // avoid creating a cache
- sys.getLogWriter().info(add);
- }
- LogWriterSupport.getLogWriter().info(add);
- ignoredExceptions.add(ret);
- return ret;
+ ignoredExceptions.add(ignoredException);
+ return ignoredException;
}
/**
* Log in all VMs, in both the test logger and the GemFire logger the
- * expected exception string to prevent grep logs from complaining. The
- * expected string is used by the GrepLogs utility and so can contain
+ * ignored exception string to prevent grep logs from complaining. The
+ * suspect string is used by the GrepLogs utility and so can contain
* regular expression characters.
*
- * If you do not remove the expected exception, it will be removed at the
+ * If you do not remove the ignored exception, it will be removed at the
* end of your test case automatically.
*
* @since 5.7bugfix
- * @param exception
- * the exception string to expect
- * @return an ExpectedException instance for removal
+ * @param suspectString the exception string to expect
+ * @return an IgnoredException instance for removal
*/
- public static IgnoredException addIgnoredException(final String exception) {
- return addIgnoredException(exception, null);
+ public static IgnoredException addIgnoredException(final String suspectString) {
+ return addIgnoredException(suspectString, null);
}
}
\ No newline at end of file
[19/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/MemberFunctionExecutionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/MemberFunctionExecutionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/MemberFunctionExecutionDUnitTest.java
index 9fbab8c..1f3ed17 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/MemberFunctionExecutionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/MemberFunctionExecutionDUnitTest.java
@@ -50,7 +50,7 @@ import com.gemstone.gemfire.internal.cache.functions.TestFunction;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -168,7 +168,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
resultCollector.getResult();
fail("Should have received FunctionException due to class not found");
} catch (FunctionException expected) {
- LogWriterSupport.getLogWriter().warning("received wrong exception cause", expected.getCause());
+ LogWriterUtils.getLogWriter().warning("received wrong exception cause", expected.getCause());
assertTrue((expected.getCause() instanceof ClassNotFoundException));
}
}
@@ -387,7 +387,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
}
});
List li = (ArrayList)rc.getResult();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"MemberFunctionExecutionDUnitTest#excuteOnMembers: Result : " + li);
assertEquals(li.size(), 1);
for (Object obj : li) {
@@ -396,7 +396,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
ds.disconnect();
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception Occured : "+ e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : "+ e.getMessage());
e.printStackTrace();
Assert.fail("Test failed",e);
}
@@ -444,7 +444,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
try {
ResultCollector rc = executor.execute(function.getId());
List li = (ArrayList)rc.getResult();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"MemberFunctionExecutionDUnitTest#excuteOnMembers: Result : " + li);
assertEquals(li.size(), noOfMembers.intValue());
for (Object obj : li) {
@@ -452,7 +452,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception Occured : "+ e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : "+ e.getMessage());
e.printStackTrace();
Assert.fail("Test failed",e);
}
@@ -484,7 +484,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
try {
ResultCollector rc = memberExcution.withArgs(Boolean.TRUE).execute(function);
List li = (ArrayList)rc.getResult();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"MemberFunctionExecutionDUnitTest#excuteOnMembers: Result : " + li);
assertEquals(noOfMembers.intValue(), li.size());
for (Object obj : li) {
@@ -492,7 +492,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception Occured : "+ e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : "+ e.getMessage());
e.printStackTrace();
Assert.fail("Test failed",e);
}
@@ -586,7 +586,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
}
});
List li = (ArrayList)rc.getResult();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"MemberFunctionExecutionDUnitTest#excuteOnMembers: Result : " + li);
assertEquals(li.size(), noOfMembers.intValue());
for (Object obj : li) {
@@ -594,7 +594,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception Occured : "+ e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : "+ e.getMessage());
e.printStackTrace();
Assert.fail("Test failed",e);
}
@@ -614,7 +614,7 @@ public class MemberFunctionExecutionDUnitTest extends CacheTestCase {
fail("Test Failed");
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().info("Exception Occured : "+ expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : "+ expected.getMessage());
// boolean check = expected.getMessage().equals("Cannot return any result, as Function.hasResult() is false");
assertTrue(expected.getMessage().equals(LocalizedStrings.ExecuteFunction_CANNOT_0_RESULTS_HASRESULT_FALSE
.toLocalizedString("return any")));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/OnGroupsFunctionExecutionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/OnGroupsFunctionExecutionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/OnGroupsFunctionExecutionDUnitTest.java
index 9e748d9..5feec25 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/OnGroupsFunctionExecutionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/OnGroupsFunctionExecutionDUnitTest.java
@@ -46,7 +46,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -88,7 +88,7 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
@Override
public void execute(FunctionContext context) {
- LogWriterSupport.getLogWriter().fine("SWAP:1:executing OnGroupsFunction:"+invocationCount);
+ LogWriterUtils.getLogWriter().fine("SWAP:1:executing OnGroupsFunction:"+invocationCount);
InternalDistributedSystem ds = InternalDistributedSystem.getConnectedInstance();
synchronized (OnGroupsFunction.class) {
invocationCount++;
@@ -233,7 +233,7 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
vm0.invoke(new SerializableCallable() {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().fine("SWAP:invoking on gm");
+ LogWriterUtils.getLogWriter().fine("SWAP:invoking on gm");
DistributedSystem ds = getSystem();
try {
FunctionService.onMember("no such group");
@@ -260,7 +260,7 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
@Override
public Object call() throws Exception {
DistributedSystem ds = getSystem();
- LogWriterSupport.getLogWriter().fine("SWAP:invoking on g0");
+ LogWriterUtils.getLogWriter().fine("SWAP:invoking on g0");
Execution e = FunctionService.onMembers("g0");
ArrayList<String> args = new ArrayList<String>();
args.add("g0");
@@ -300,7 +300,7 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
vm0.invoke(new SerializableCallable() {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().fine("SWAP:invoking on g0 g1");
+ LogWriterUtils.getLogWriter().fine("SWAP:invoking on g0 g1");
InternalDistributedSystem ds = InternalDistributedSystem.getConnectedInstance();
Execution e = FunctionService.onMembers("g0", "g1");
ArrayList<String> args = new ArrayList<String>();
@@ -685,11 +685,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
c.getLogger().info("SWAP:invoking function from client on g0");
@@ -783,11 +783,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
c.getLogger().info("SWAP:invoking function from client on g0");
@@ -860,11 +860,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
IgnoredException ex = IgnoredException.addIgnoredException("No member found");
@@ -940,11 +940,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
IgnoredException expected = IgnoredException.addIgnoredException("No member found");
@@ -1017,11 +1017,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
Execution e = InternalFunctionService.onServers(c, "g1");
@@ -1066,11 +1066,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
Execution e = InternalFunctionService.onServers(c, "g1");
@@ -1115,11 +1115,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
Execution e = InternalFunctionService.onServers(c, "g1");
@@ -1129,7 +1129,7 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
e = e.withArgs(args);
((AbstractExecution)e).setIgnoreDepartedMembers(true);
ArrayList l = (ArrayList) e.execute(new OnGroupsExceptionFunction()).getResult();
- LogWriterSupport.getLogWriter().info("SWAP:result:"+l);
+ LogWriterUtils.getLogWriter().info("SWAP:result:"+l);
assertEquals(2, l.size());
if (l.get(0) instanceof Throwable) {
assertTrue((Boolean) l.get(1));
@@ -1182,11 +1182,11 @@ public class OnGroupsFunctionExecutionDUnitTest extends DistributedTestCase {
} catch (CacheClosedException cce) {
}
disconnectFromDS();
- LogWriterSupport.getLogWriter().fine("SWAP:creating client cache");
+ LogWriterUtils.getLogWriter().fine("SWAP:creating client cache");
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolLocator(hostName, locatorPort);
ccf.setPoolServerGroup("mg");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache c = ccf.create();
c.getLogger().info("SWAP:invoking function from client on g0");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerFunctionExecutionNoAckDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerFunctionExecutionNoAckDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerFunctionExecutionNoAckDUnitTest.java
index 0b3d95a..d33dc47 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerFunctionExecutionNoAckDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerFunctionExecutionNoAckDUnitTest.java
@@ -31,7 +31,7 @@ import com.gemstone.gemfire.cache.execute.ResultCollector;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.cache.functions.TestFunction;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServerTestBase{
/**
@@ -67,7 +67,7 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
isByName = new Boolean(true);
toRegister = new Boolean(true);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRClientServerFunctionExecutionNoAckDUnitTest#testServerFunctionExecution_NoAck : Starting test");
client.invoke(PRClientServerFunctionExecutionNoAckDUnitTest.class,
@@ -85,7 +85,7 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
registerFunctionAtServer(functionAck);
toRegister = new Boolean(false);
isByName = new Boolean(true);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRClientServerFunctionExecutionNoAckDUnitTest#testServerFunctionExecution_NoAck : Starting test");
client.invoke(PRClientServerFunctionExecutionNoAckDUnitTest.class,
@@ -95,7 +95,7 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
}
private void createScenario() {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRClientServerFFunctionExecutionDUnitTest#createScenario : creating scenario");
createClientServerScenarionWithoutRegion();
@@ -118,11 +118,11 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
for(int i=0;i< NUM_ITERATION;i++)
execute(member, Boolean.TRUE, functionNoAck, isByName, toRegister);
t.stop();
- LogWriterSupport.getLogWriter().info("Time taken to execute boolean based" + NUM_ITERATION + "NoAck functions :" + t.getTimeInMs());
+ LogWriterUtils.getLogWriter().info("Time taken to execute boolean based" + NUM_ITERATION + "NoAck functions :" + t.getTimeInMs());
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
@@ -136,11 +136,11 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
for(int i=0;i< NUM_ITERATION;i++)
execute(member, testKeysSet, functionNoAck, isByName, toRegister);
t.stop();
- LogWriterSupport.getLogWriter().info("Time taken to execute setbased" + NUM_ITERATION + "NoAck functions :" + t.getTimeInMs());
+ LogWriterUtils.getLogWriter().info("Time taken to execute setbased" + NUM_ITERATION + "NoAck functions :" + t.getTimeInMs());
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operationssssss");
}
if(toRegister.booleanValue()){
@@ -159,11 +159,11 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
timeinms += t.getTimeInMs();
assertEquals(Boolean.TRUE, ((List)rc.getResult()).get(0));
}
- LogWriterSupport.getLogWriter().info("Time taken to execute boolean based" + NUM_ITERATION + "haveResults functions :" + timeinms);
+ LogWriterUtils.getLogWriter().info("Time taken to execute boolean based" + NUM_ITERATION + "haveResults functions :" + timeinms);
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
@@ -185,11 +185,11 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
}
}
- LogWriterSupport.getLogWriter().info("Time taken to execute setbased" + NUM_ITERATION + "haveResults functions :" + timeinms);
+ LogWriterUtils.getLogWriter().info("Time taken to execute setbased" + NUM_ITERATION + "haveResults functions :" + timeinms);
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operationssssss");
}
}
@@ -212,7 +212,7 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation allserver ");
}
@@ -225,7 +225,7 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
}
@@ -235,15 +235,15 @@ public class PRClientServerFunctionExecutionNoAckDUnitTest extends PRClientServe
throws Exception {
if (isByName.booleanValue()) {// by name
if(toRegister.booleanValue()){
- LogWriterSupport.getLogWriter().info("The function name to execute : " + function.getId());
+ LogWriterUtils.getLogWriter().info("The function name to execute : " + function.getId());
Execution me = member.withArgs(args);
- LogWriterSupport.getLogWriter().info("The args passed : " + args);
+ LogWriterUtils.getLogWriter().info("The args passed : " + args);
return me.execute(function.getId());
}
else {
- LogWriterSupport.getLogWriter().info("The function name to execute : (without Register) " + function.getId());
+ LogWriterUtils.getLogWriter().info("The function name to execute : (without Register) " + function.getId());
Execution me = member.withArgs(args);
- LogWriterSupport.getLogWriter().info("The args passed : " + args);
+ LogWriterUtils.getLogWriter().info("The args passed : " + args);
return me.execute(function.getId(), function.hasResult(),function.isHA(),function.optimizeForWrite());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionDUnitTest.java
index 375d755..be5b082 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionDUnitTest.java
@@ -59,7 +59,7 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -489,7 +489,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
if (!(ex.getCause() instanceof ServerConnectivityException)
&& !(ex.getCause() instanceof FunctionInvocationTargetException)) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
}
@@ -525,7 +525,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
assertEquals(1, ((List)rs.getResult()).size());
} catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the execute operation", ex);
}
}
@@ -560,7 +560,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
assertEquals(1, ((List)rs.getResult()).size());
} catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
}
@@ -672,7 +672,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
String excuse;
public boolean done() {
int sz = pool.getConnectedServerCount();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Checking for the Live Servers : Expected : " + expectedLiveServers
+ " Available :" + sz);
if (sz == expectedLiveServers.intValue()) {
@@ -705,7 +705,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
ResultCollector rc1 = dataSet.withFilter(testKeysSet).withArgs(Boolean.TRUE).execute(
function.getId());
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
@@ -713,7 +713,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Got an exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Got an exception : " + e.getMessage());
assertTrue(e instanceof EOFException || e instanceof SocketException
|| e instanceof SocketTimeoutException
|| e instanceof ServerException || e instanceof IOException
@@ -734,7 +734,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
ResultCollector rc1 = dataSet.withFilter(testKeysSet).withArgs(Boolean.TRUE).execute(
function.getId());
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
return l;
}
@@ -782,7 +782,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
PartitionedRegion region = (PartitionedRegion)cache.getRegion(PartitionedRegionName);
HashMap localBucket2RegionMap = (HashMap)region
.getDataStore().getSizeLocally();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + PartitionedRegionName + " in this VM :- "
+ localBucket2RegionMap.size());
Set entrySet = localBucket2RegionMap.entrySet();
@@ -825,7 +825,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
.iterator().next());
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().info("Exception : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + expected.getMessage());
expected.printStackTrace();
fail("Test failed after the put operation");
}
@@ -854,7 +854,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
List l = null;
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -906,7 +906,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE,
function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertTrue(i.next() instanceof MyFunctionExecutionException);
@@ -991,7 +991,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE,
function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -1027,7 +1027,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
Assert.fail("Test failed after the put operation",ex);
}
}
@@ -1071,13 +1071,13 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}
});
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
}
}catch(Exception e){
- LogWriterSupport.getLogWriter().info("Exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + e.getMessage());
e.printStackTrace();
fail("Test failed after the put operation");
@@ -1249,7 +1249,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
Assert.fail("Test failed after the put operation",ex);
}
}
@@ -1304,7 +1304,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
@@ -1324,7 +1324,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
}
@@ -1386,7 +1386,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
Assert.fail("Test failed after the put operation",ex);
}
@@ -1498,7 +1498,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
});
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().fine("Exception occured : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().fine("Exception occured : " + expected.getMessage());
assertTrue(expected.getMessage().contains(
"No target node found for KEY = " + testKey)
|| expected.getMessage()
@@ -1560,7 +1560,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
Assert.fail("Test failed after the put operation",ex);
}
}
@@ -1616,7 +1616,7 @@ public class PRClientServerRegionFunctionExecutionDUnitTest extends PRClientServ
});
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().fine("Exception occured : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().fine("Exception occured : " + expected.getMessage());
assertTrue(expected.getCause().getMessage().contains(
"Could not create an instance of com.gemstone.gemfire.internal.cache.execute.PRClientServerRegionFunctionExecutionDUnitTest$UnDeserializable"));
} finally {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionFailoverDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionFailoverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionFailoverDUnitTest.java
index e97716d..3615ce4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionFailoverDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionFailoverDUnitTest.java
@@ -46,13 +46,13 @@ import com.gemstone.gemfire.internal.cache.functions.TestFunction;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
@@ -116,7 +116,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"verifyDeadAndLiveServers", new Object[] { new Integer(1),
new Integer(2) });
- Threads.join(async[0], 6 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 6 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -156,7 +156,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"verifyDeadAndLiveServers", new Object[] { new Integer(1),
new Integer(2) });
- Threads.join(async[0], 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 5 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -194,7 +194,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"createProxyRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()) });
Function function = new TestFunction(true,
TestFunction.TEST_FUNCTION_HA_REGION);
@@ -225,7 +225,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"createProxyRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()) });
Function function = new TestFunction(true,
TestFunction.TEST_FUNCTION_HA_REGION);
@@ -257,7 +257,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"createProxyRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()) });
Function function = new TestFunction(true,
TestFunction.TEST_FUNCTION_NONHA_REGION);
@@ -290,7 +290,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"createProxyRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()) });
//Make sure the buckets are created.
client.invoke(new SerializableRunnable() {
@@ -355,7 +355,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
final int portLocator = AvailablePort
.getRandomAvailablePort(AvailablePort.SOCKET);
- final String hostLocator = NetworkSupport.getServerHostName(server1.getHost());
+ final String hostLocator = NetworkUtils.getServerHostName(server1.getHost());
final String locator = hostLocator + "[" + portLocator + "]";
startLocatorInVM(portLocator);
@@ -413,7 +413,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
final int portLocator = AvailablePort
.getRandomAvailablePort(AvailablePort.SOCKET);
- final String hostLocator = NetworkSupport.getServerHostName(server1.getHost());
+ final String hostLocator = NetworkUtils.getServerHostName(server1.getHost());
final String locator = hostLocator + "[" + portLocator + "]";
startLocatorInVM(portLocator);
@@ -460,7 +460,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
File logFile = new File("locator-" + locatorPort + ".log");
Properties props = new Properties();
- props = DistributedTestSupport.getAllDistributedSystemProperties(props);
+ props = DistributedTestUtils.getAllDistributedSystemProperties(props);
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
try {
@@ -505,7 +505,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(regionName, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + regionName + " created Successfully :"
+ region.toString());
return port;
@@ -538,7 +538,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
RegionAttributes attrs = factory.create();
region = cache.createRegion(regionName, attrs);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + regionName + " created Successfully :"
+ region.toString());
}
@@ -547,7 +547,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
for(int i = 0 ; i < 113; i++){
region.put(i, "KB_"+i);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + regionName + " Have size :"
+ region.size());
}
@@ -556,7 +556,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
Execution execute = FunctionService.onRegion(region);
ResultCollector rc = execute.withArgs(Boolean.TRUE).execute(
new TestFunction(true, TestFunction.TEST_FUNCTION_LASTRESULT));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Exeuction Result :"
+ rc.getResult());
List l = ((List)rc.getResult());
@@ -564,7 +564,7 @@ public class PRClientServerRegionFunctionExecutionFailoverDUnitTest extends
}
public static void checkSize(){
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + regionName + " Have size :"
+ region.size());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest.java
index 728d1fd..f1d354b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest.java
@@ -50,8 +50,8 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -367,7 +367,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"verifyDeadAndLiveServers", new Object[] { new Integer(1),
new Integer(2) });
- Threads.join(async[0], 6 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 6 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -415,7 +415,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest.class,
"verifyDeadAndLiveServers", new Object[] { new Integer(1),
new Integer(2) });
- Threads.join(async[0], 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 5 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -520,7 +520,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
public boolean done() {
int sz = pool.getConnectedServerCount();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Checking for the Live Servers : Expected : "
+ expectedLiveServers + " Available :" + sz);
if (sz == expectedLiveServers.intValue()) {
@@ -574,7 +574,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Got an exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Got an exception : " + e.getMessage());
assertTrue(e instanceof EOFException || e instanceof SocketException
|| e instanceof SocketTimeoutException
|| e instanceof ServerException || e instanceof IOException
@@ -595,7 +595,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
ResultCollector rc1 = dataSet.withFilter(testKeysSet)
.withArgs(Boolean.TRUE).execute(function.getId());
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
return l;
}
@@ -632,7 +632,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
.getRegion(PartitionedRegionName);
HashMap localBucket2RegionMap = (HashMap)region.getDataStore()
.getSizeLocally();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + PartitionedRegionName + " in this VM :- "
+ localBucket2RegionMap.size());
Set entrySet = localBucket2RegionMap.entrySet();
@@ -661,8 +661,8 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
ResultCollector rc1 = executeOnAll(dataSet, Boolean.TRUE, function,
isByName);
List resultList = (List)((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + resultList.size());
- LogWriterSupport.getLogWriter().info("Result are SSSS : " + resultList);
+ LogWriterUtils.getLogWriter().info("Result size : " + resultList.size());
+ LogWriterUtils.getLogWriter().info("Result are SSSS : " + resultList);
assertEquals(3, resultList.size());
Iterator resultIterator = resultList.iterator();
@@ -799,7 +799,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
.iterator().next());
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().info("Exception : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + expected.getMessage());
expected.printStackTrace();
fail("Test failed after the put operation");
}
@@ -829,7 +829,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE,
function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -882,7 +882,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE,
function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -921,7 +921,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
}
@@ -964,14 +964,14 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
}
});
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + e.getMessage());
e.printStackTrace();
fail("Test failed after the put operation");
@@ -1058,7 +1058,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
}
catch (FunctionException expected) {
expected.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + expected.getMessage());
assertTrue(expected.getMessage().startsWith(
(LocalizedStrings.ExecuteFunction_CANNOT_0_RESULTS_HASRESULT_FALSE
.toLocalizedString("return any"))));
@@ -1119,7 +1119,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
}
@@ -1183,7 +1183,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
});
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().fine("Exception occured : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().fine("Exception occured : " + expected.getMessage());
assertTrue(expected.getMessage().contains(
"No target node found for KEY = " + testKey)
|| expected.getMessage()
@@ -1245,7 +1245,7 @@ public class PRClientServerRegionFunctionExecutionNoSingleHopDUnitTest extends
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest.java
index 79ff701..8e2d2d4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest.java
@@ -50,8 +50,8 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -328,7 +328,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"verifyDeadAndLiveServers", new Object[] { new Integer(1),
new Integer(2) });
- Threads.join(async[0], 6 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 6 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -380,7 +380,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest.class,
"verifyDeadAndLiveServers", new Object[] { new Integer(1),
new Integer(2) });
- Threads.join(async[0], 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 5 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -485,7 +485,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
public boolean done() {
int sz = pool.getConnectedServerCount();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Checking for the Live Servers : Expected : "
+ expectedLiveServers + " Available :" + sz);
if (sz == expectedLiveServers.intValue()) {
@@ -539,7 +539,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Got an exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Got an exception : " + e.getMessage());
assertTrue(e instanceof EOFException || e instanceof SocketException
|| e instanceof SocketTimeoutException
|| e instanceof ServerException || e instanceof IOException
@@ -560,7 +560,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
ResultCollector rc1 = dataSet.withFilter(testKeysSet)
.withArgs(Boolean.TRUE).execute(function.getId());
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
return l;
}
@@ -591,7 +591,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
.getRegion(PartitionedRegionName);
HashMap localBucket2RegionMap = (HashMap)region.getDataStore()
.getSizeLocally();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + PartitionedRegionName + " in this VM :- "
+ localBucket2RegionMap.size());
Set entrySet = localBucket2RegionMap.entrySet();
@@ -620,8 +620,8 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
ResultCollector rc1 = executeOnAll(dataSet, Boolean.TRUE, function,
isByName);
List resultList = (List)((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + resultList.size());
- LogWriterSupport.getLogWriter().info("Result are SSSS : " + resultList);
+ LogWriterUtils.getLogWriter().info("Result size : " + resultList.size());
+ LogWriterUtils.getLogWriter().info("Result are SSSS : " + resultList);
assertEquals(3, resultList.size());
Iterator resultIterator = resultList.iterator();
@@ -758,7 +758,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
.iterator().next());
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().info("Exception : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + expected.getMessage());
expected.printStackTrace();
fail("Test failed after the put operation");
}
@@ -788,7 +788,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE,
function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -840,7 +840,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE,
function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -879,7 +879,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
}
@@ -922,14 +922,14 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
}
});
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + e.getMessage());
e.printStackTrace();
fail("Test failed after the put operation");
@@ -1016,7 +1016,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
}
catch (FunctionException expected) {
expected.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + expected.getMessage());
assertTrue(expected.getMessage().startsWith(
(LocalizedStrings.ExecuteFunction_CANNOT_0_RESULTS_HASRESULT_FALSE
.toLocalizedString("return any"))));
@@ -1077,7 +1077,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
}
@@ -1141,7 +1141,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
});
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().fine("Exception occured : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().fine("Exception occured : " + expected.getMessage());
assertTrue(expected.getMessage().contains(
"No target node found for KEY = " + testKey)
|| expected.getMessage()
@@ -1203,7 +1203,7 @@ public class PRClientServerRegionFunctionExecutionSelectorNoSingleHopDUnitTest e
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
Assert.fail("Test failed after the put operation", ex);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSingleHopDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSingleHopDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSingleHopDUnitTest.java
index 50fa452..1afd8f5 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSingleHopDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerRegionFunctionExecutionSingleHopDUnitTest.java
@@ -50,8 +50,8 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
public class PRClientServerRegionFunctionExecutionSingleHopDUnitTest extends PRClientServerTestBase {
@@ -353,7 +353,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
client.invoke(PRClientServerRegionFunctionExecutionDUnitTest.class,
"verifyDeadAndLiveServers", new Object[] { new Integer(1),
new Integer(2) });
- Threads.join(async[0], 6 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 6 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -382,7 +382,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
server3.invoke(PRClientServerRegionFunctionExecutionSingleHopDUnitTest.class, "startServerHA");
server1.invoke(PRClientServerRegionFunctionExecutionSingleHopDUnitTest.class, "closeCacheHA");
client.invoke(PRClientServerRegionFunctionExecutionSingleHopDUnitTest.class, "verifyDeadAndLiveServers",new Object[]{new Integer(1),new Integer(2)});
- Threads.join(async[0], 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 5 * 60 * 1000);
if(async[0].getException() != null){
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -486,7 +486,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
String excuse;
public boolean done() {
int sz = pool.getConnectedServerCount();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Checking for the Live Servers : Expected : " + expectedLiveServers
+ " Available :" + sz);
if (sz == expectedLiveServers.intValue()) {
@@ -536,7 +536,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Got an exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Got an exception : " + e.getMessage());
assertTrue(e instanceof EOFException || e instanceof SocketException
|| e instanceof SocketTimeoutException
|| e instanceof ServerException || e instanceof IOException
@@ -557,7 +557,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
ResultCollector rc1 = dataSet.withFilter(testKeysSet).withArgs(Boolean.TRUE).execute(
function.getId());
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
return l;
}
@@ -592,7 +592,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
PartitionedRegion region = (PartitionedRegion)cache.getRegion(PartitionedRegionName);
HashMap localBucket2RegionMap = (HashMap)region
.getDataStore().getSizeLocally();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + PartitionedRegionName + " in this VM :- "
+ localBucket2RegionMap.size());
Set entrySet = localBucket2RegionMap.entrySet();
@@ -620,8 +620,8 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
}
ResultCollector rc1 = executeOnAll(dataSet, Boolean.TRUE, function, isByName);
List resultList = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + resultList.size());
- LogWriterSupport.getLogWriter().info("Result are SSSS : " + resultList);
+ LogWriterUtils.getLogWriter().info("Result size : " + resultList.size());
+ LogWriterUtils.getLogWriter().info("Result are SSSS : " + resultList);
assertEquals(3, resultList.size());
// while (resultIterator.hasNext()) {
@@ -751,7 +751,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
.iterator().next());
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().info("Exception : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + expected.getMessage());
expected.printStackTrace();
fail("Test failed after the put operation");
}
@@ -780,7 +780,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
List l = null;
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE, function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -830,7 +830,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
ResultCollector rc1 = execute(dataSet, testKeysSet, Boolean.TRUE,
function, isByName);
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
@@ -866,7 +866,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
Assert.fail("Test failed after the put operation",ex);
}
}
@@ -910,13 +910,13 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
}
});
l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info("Result size : " + l.size());
+ LogWriterUtils.getLogWriter().info("Result size : " + l.size());
assertEquals(3, l.size());
for (Iterator i = l.iterator(); i.hasNext();) {
assertEquals(Boolean.TRUE, i.next());
}
}catch(Exception e){
- LogWriterSupport.getLogWriter().info("Exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + e.getMessage());
e.printStackTrace();
fail("Test failed after the put operation");
@@ -1003,7 +1003,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
fail("Test failed after the put operation");
} catch(FunctionException expected) {
expected.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + expected.getMessage());
assertTrue(expected.getMessage().startsWith((LocalizedStrings.ExecuteFunction_CANNOT_0_RESULTS_HASRESULT_FALSE
.toLocalizedString("return any"))));
}
@@ -1061,7 +1061,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
Assert.fail("Test failed after the put operation",ex);
}
}
@@ -1128,7 +1128,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
});
}
catch (Exception expected) {
- LogWriterSupport.getLogWriter().fine("Exception occured : " + expected.getMessage());
+ LogWriterUtils.getLogWriter().fine("Exception occured : " + expected.getMessage());
assertTrue(expected.getMessage().contains(
"No target node found for KEY = " + testKey)
|| expected.getMessage()
@@ -1190,7 +1190,7 @@ import com.gemstone.gemfire.test.dunit.WaitCriterion;
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
Assert.fail("Test failed after the put operation",ex);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerTestBase.java
index 79d8bbb..4bee088 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRClientServerTestBase.java
@@ -53,8 +53,8 @@ import com.gemstone.gemfire.internal.cache.functions.TestFunction;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -412,7 +412,7 @@ public class PRClientServerTestBase extends CacheTestCase {
public static void createCacheClientWithoutRegion(String host, Integer port1,
Integer port2, Integer port3) throws Exception {
CacheServerTestUtil.disableShufflingOfEndpoints();
- LogWriterSupport.getLogWriter().info("PRClientServerTestBase#createCacheClientWithoutRegion : creating pool");
+ LogWriterUtils.getLogWriter().info("PRClientServerTestBase#createCacheClientWithoutRegion : creating pool");
serverPort1 = port1;
serverPort2 = port2;
serverPort3 = port3;
@@ -435,7 +435,7 @@ public class PRClientServerTestBase extends CacheTestCase {
public static void createCacheClientWithDistributedRegion(String host, Integer port1,
Integer port2, Integer port3) throws Exception {
CacheServerTestUtil.disableShufflingOfEndpoints();
- LogWriterSupport.getLogWriter().info("PRClientServerTestBase#createCacheClientWithoutRegion : creating pool");
+ LogWriterUtils.getLogWriter().info("PRClientServerTestBase#createCacheClientWithoutRegion : creating pool");
serverPort1 = port1;
serverPort2 = port2;
serverPort3 = port3;
@@ -477,7 +477,7 @@ public class PRClientServerTestBase extends CacheTestCase {
serverPort2 = port2;
serverPort3 = port3;
client.invoke(PRClientServerTestBase.class, "createCacheClient",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2,
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2,
port3 });
}
@@ -492,7 +492,7 @@ public class PRClientServerTestBase extends CacheTestCase {
new Object[] {commonAttributes ,new Integer(localMaxMemoryServer2) });
serverPort1 = port1;
client.invoke(PRClientServerTestBase.class, "createCacheClient_SingleConnection",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1});
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1});
}
@@ -513,7 +513,7 @@ public class PRClientServerTestBase extends CacheTestCase {
serverPort2 = port2;
serverPort3 = port3;
client.invoke(PRClientServerTestBase.class, "createCacheClientWith2Regions",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2,
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2,
port3 });
}
@@ -562,7 +562,7 @@ public class PRClientServerTestBase extends CacheTestCase {
serverPort2 = port2;
serverPort3 = port3;
client.invoke(PRClientServerTestBase.class, "createNoSingleHopCacheClient",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2,
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2,
port3 });
}
@@ -582,13 +582,13 @@ public class PRClientServerTestBase extends CacheTestCase {
serverPort2 = port2;
serverPort3 = port3;
client.invoke(PRClientServerTestBase.class, "createNoSingleHopCacheClient",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2,
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2,
port3 });
}
protected void createClientServerScenarionWithoutRegion () {
- LogWriterSupport.getLogWriter().info("PRClientServerTestBase#createClientServerScenarionWithoutRegion : creating client server");
+ LogWriterUtils.getLogWriter().info("PRClientServerTestBase#createClientServerScenarionWithoutRegion : creating client server");
createCacheInClientServer();
Integer port1 = (Integer)server1.invoke(PRClientServerTestBase.class,
"createCacheServer");
@@ -601,12 +601,12 @@ public class PRClientServerTestBase extends CacheTestCase {
serverPort3 = port3;
client.invoke(PRClientServerTestBase.class, "createCacheClientWithoutRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2,
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2,
port3 });
}
protected void createClientServerScenarionWithDistributedtRegion () {
- LogWriterSupport.getLogWriter().info("PRClientServerTestBase#createClientServerScenarionWithoutRegion : creating client server");
+ LogWriterUtils.getLogWriter().info("PRClientServerTestBase#createClientServerScenarionWithoutRegion : creating client server");
createCacheInClientServer();
Integer port1 = (Integer)server1.invoke(PRClientServerTestBase.class,
"createCacheServerWithDR");
@@ -620,7 +620,7 @@ public class PRClientServerTestBase extends CacheTestCase {
client.invoke(PRClientServerTestBase.class, "createCacheClientWithDistributedRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2,
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2,
port3 });
}
@@ -696,11 +696,11 @@ public class PRClientServerTestBase extends CacheTestCase {
};
Wait.waitForCriterion(wc, 2000, 500, false);
Collection bridgeServers = cache.getCacheServers();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Start Server Bridge Servers list : " + bridgeServers.size());
Iterator bridgeIterator = bridgeServers.iterator();
CacheServer bridgeServer = (CacheServer)bridgeIterator.next();
- LogWriterSupport.getLogWriter().info("start Server Bridge Server" + bridgeServer);
+ LogWriterUtils.getLogWriter().info("start Server Bridge Server" + bridgeServer);
try {
bridgeServer.start();
}
[27/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
index 76da571..07807c8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -109,7 +109,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
public void testPRWithLocalAndRemoteException()
throws Exception {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
@@ -120,7 +120,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vmList.add(vm1);
vmList.add(vm0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp
@@ -128,17 +128,17 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vm1.invoke(PRQHelp
.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
@@ -148,29 +148,29 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
// Execute query first time. This is to make sure all the buckets are
// created
// (lazy bucket creation).
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying on VM0 First time");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
@@ -220,7 +220,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
gotException = true;
if (ex.getMessage().contains("local node")) {
// ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from local node successfully.");
+ LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from local node successfully.");
} else {
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest: Test did not receive Exception as expected from local node rather received", ex);
}
@@ -232,14 +232,14 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
}
);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
public void testRemoteException() throws Exception {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
@@ -250,7 +250,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vmList.add(vm1);
vmList.add(vm0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp
@@ -258,17 +258,17 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vm1.invoke(PRQHelp
.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
@@ -278,22 +278,22 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
@@ -351,7 +351,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
gotException = true;
if (ex.getMessage().contains("remote node")) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from remote node successfully.");
+ LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from remote node successfully.");
} else {
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest: Test did not receive Exception as expected from remote node rather received", ex);
}
@@ -363,14 +363,14 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
}
);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
public void testCacheCloseExceptionFromLocalAndRemote() throws Exception {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
@@ -381,7 +381,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vmList.add(vm1);
vmList.add(vm0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp
@@ -389,17 +389,17 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vm1.invoke(PRQHelp
.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
@@ -409,22 +409,22 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
@@ -440,7 +440,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
@Override
public void afterIterationEvaluation(Object result) {
- LogWriterSupport.getLogWriter().info("Calling after IterationEvaluation :" + noOfAccess);
+ LogWriterUtils.getLogWriter().info("Calling after IterationEvaluation :" + noOfAccess);
if (noOfAccess > 2) {
PRQHelp.getCache().getRegion(name).destroyRegion();
}
@@ -466,7 +466,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
@Override
public void afterIterationEvaluation(Object result) {
//Object region = ((DefaultQuery)query).getRegionsInQuery(null).iterator().next();
- LogWriterSupport.getLogWriter().info("Calling after IterationEvaluation :" + noOfAccess);
+ LogWriterUtils.getLogWriter().info("Calling after IterationEvaluation :" + noOfAccess);
if (noOfAccess > 2) {
PRQHelp.getCache().close();
}
@@ -483,8 +483,8 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
} catch (Exception ex) {
gotException = true;
if (ex instanceof CacheClosedException || ex instanceof QueryInvocationTargetException) {
- LogWriterSupport.getLogWriter().info(ex.getMessage());
- LogWriterSupport.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from local node successfully.");
+ LogWriterUtils.getLogWriter().info(ex.getMessage());
+ LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from local node successfully.");
} else {
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest: Test did not receive Exception as expected from local node rather received", ex);
}
@@ -496,14 +496,14 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
}
);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
public void testCacheCloseExceptionFromLocalAndRemote2() throws Exception {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
@@ -514,7 +514,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vmList.add(vm1);
vmList.add(vm0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp
@@ -522,17 +522,17 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vm1.invoke(PRQHelp
.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, redundancy, numOfBuckets));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
@@ -542,22 +542,22 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
@@ -573,7 +573,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
@Override
public void afterIterationEvaluation(Object result) {
- LogWriterSupport.getLogWriter().info("Calling after IterationEvaluation :" + noOfAccess);
+ LogWriterUtils.getLogWriter().info("Calling after IterationEvaluation :" + noOfAccess);
if (noOfAccess > 1) {
PRQHelp.getCache().getRegion(name).destroyRegion();
}
@@ -626,8 +626,8 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
} catch (Exception ex) {
gotException = true;
if (ex instanceof QueryInvocationTargetException) {
- LogWriterSupport.getLogWriter().info(ex.getMessage());
- LogWriterSupport.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from remote node successfully as region.destroy happened before cache.close().");
+ LogWriterUtils.getLogWriter().info(ex.getMessage());
+ LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Test received Exception from remote node successfully as region.destroy happened before cache.close().");
} else {
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest: Test did not receive Exception as expected from local node rather received", ex);
}
@@ -639,14 +639,14 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
}
);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
public void testForceReattemptExceptionFromLocal() throws Exception {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception test Started");
Host host = Host.getHost(0);
@@ -659,7 +659,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vmList.add(vm0);
vmList.add(vm2);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating PR's across all VM0 , VM1");
vm0.invoke(PRQHelp
@@ -669,17 +669,17 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
vm2.invoke(PRQHelp
.getCacheSerializableRunnableForPRCreateLimitedBuckets(name, 1/*redundancy*/, numOfBuckets));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created PR on VM0 , VM1");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Created Local Region on VM0");
@@ -689,22 +689,22 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithLocalAndRemoteException: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
@@ -721,13 +721,13 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
@Override
public void startQuery(Query query) {
Object region = ((DefaultQuery)query).getRegionsInQuery(null).iterator().next();
- LogWriterSupport.getLogWriter().info("Region type on VM1:"+region);
+ LogWriterUtils.getLogWriter().info("Region type on VM1:"+region);
if (noOfAccess == 1) {
PartitionedRegion pr = (PartitionedRegion)PRQHelp.getCache().getRegion(name);
List buks = pr.getLocalPrimaryBucketsListTestOnly();
- LogWriterSupport.getLogWriter().info("Available buckets:"+buks);
+ LogWriterUtils.getLogWriter().info("Available buckets:"+buks);
int bukId = ((Integer)(buks.get(0))).intValue();
- LogWriterSupport.getLogWriter().info("Destroying bucket id:"+bukId);
+ LogWriterUtils.getLogWriter().info("Destroying bucket id:"+bukId);
pr.getDataStore().getLocalBucketById(bukId).destroyRegion();
}
++noOfAccess;
@@ -752,13 +752,13 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
@Override
public void startQuery(Query query) {
Object region = ((DefaultQuery)query).getRegionsInQuery(null).iterator().next();
- LogWriterSupport.getLogWriter().info("Region type on VM0:"+region);
+ LogWriterUtils.getLogWriter().info("Region type on VM0:"+region);
if (noOfAccess == 2) {
PartitionedRegion pr = (PartitionedRegion)PRQHelp.getCache().getRegion(name);
List buks = pr.getLocalPrimaryBucketsListTestOnly();
- LogWriterSupport.getLogWriter().info("Available buckets:"+buks);
+ LogWriterUtils.getLogWriter().info("Available buckets:"+buks);
int bukId = ((Integer)(buks.get(0))).intValue();
- LogWriterSupport.getLogWriter().info("Destroying bucket id:"+bukId);
+ LogWriterUtils.getLogWriter().info("Destroying bucket id:"+bukId);
pr.getDataStore().getLocalBucketById(bukId).destroyRegion();
}
++noOfAccess;
@@ -770,7 +770,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
try {
query.execute();
- LogWriterSupport.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Query executed successfully with ForceReattemptException on local and remote both.");
+ LogWriterUtils.getLogWriter().info("PRQueryRemoteNodeExceptionDUnitTest: Query executed successfully with ForceReattemptException on local and remote both.");
} catch (Exception ex) {
gotException = true;
Assert.fail("PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Test received Exception", ex);
@@ -779,7 +779,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends PartitionedRegionDUnitT
}
);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRemoteNodeExceptionDUnitTest#testPRWithLocalAndRemoteException: Querying with PR Local/Remote Exception Test ENDED");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotByteArrayDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotByteArrayDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotByteArrayDUnitTest.java
index b464f9a..43fc2e3 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotByteArrayDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotByteArrayDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.cache.snapshot.SnapshotOptions.SnapshotFormat;
import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
public class SnapshotByteArrayDUnitTest extends CacheTestCase {
@@ -81,13 +81,13 @@ public class SnapshotByteArrayDUnitTest extends CacheTestCase {
}
private void dump(EntryEvent<Integer, Object> event) {
- LogWriterSupport.getLogWriter().info("op = " + event.getOperation());
+ LogWriterUtils.getLogWriter().info("op = " + event.getOperation());
Object obj1 = event.getNewValue();
- LogWriterSupport.getLogWriter().info("new = " + obj1);
+ LogWriterUtils.getLogWriter().info("new = " + obj1);
Object obj2 = event.getOldValue();
- LogWriterSupport.getLogWriter().info("old = " + obj2);
+ LogWriterUtils.getLogWriter().info("old = " + obj2);
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotPerformanceDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotPerformanceDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotPerformanceDUnitTest.java
index 1c1829a..83fc231 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotPerformanceDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/snapshot/SnapshotPerformanceDUnitTest.java
@@ -31,7 +31,7 @@ import com.gemstone.gemfire.cache.snapshot.RegionGenerator.SerializationType;
import com.gemstone.gemfire.cache.snapshot.SnapshotOptions.SnapshotFormat;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
public class SnapshotPerformanceDUnitTest extends CacheTestCase {
@@ -53,7 +53,7 @@ public class SnapshotPerformanceDUnitTest extends CacheTestCase {
for (SerializationType st : sts) {
for (int i = 0; i < iterations; i++) {
Region<Integer, MyObject> region = createRegion(rt, st);
- LogWriterSupport.getLogWriter().info("SNP: Testing region " + region.getName() + ", iteration = " + i);
+ LogWriterUtils.getLogWriter().info("SNP: Testing region " + region.getName() + ", iteration = " + i);
loadData(region, st, dataCount);
doExport(region);
@@ -78,9 +78,9 @@ public class SnapshotPerformanceDUnitTest extends CacheTestCase {
double eps = 1000.0 * size / elapsed;
double mbps = 1000.0 * bytes / elapsed / (1024 * 1024);
- LogWriterSupport.getLogWriter().info("SNP: Exported " + size + " entries (" + bytes + " bytes) in " + elapsed + " ms");
- LogWriterSupport.getLogWriter().info("SNP: Export entry rate: " + eps + " entries / sec");
- LogWriterSupport.getLogWriter().info("SNP: Export data rate: " + mbps + " MB / sec");
+ LogWriterUtils.getLogWriter().info("SNP: Exported " + size + " entries (" + bytes + " bytes) in " + elapsed + " ms");
+ LogWriterUtils.getLogWriter().info("SNP: Export entry rate: " + eps + " entries / sec");
+ LogWriterUtils.getLogWriter().info("SNP: Export data rate: " + mbps + " MB / sec");
}
private void doImport(Region<Integer, MyObject> region) throws Exception {
@@ -96,9 +96,9 @@ public class SnapshotPerformanceDUnitTest extends CacheTestCase {
double eps = 1000.0 * size / elapsed;
double mbps = 1000.0 * bytes / elapsed / (1024 * 1024);
- LogWriterSupport.getLogWriter().info("SNP: Imported " + size + " entries (" + bytes + " bytes) in " + elapsed + " ms");
- LogWriterSupport.getLogWriter().info("SNP: Import entry rate: " + eps + " entries / sec");
- LogWriterSupport.getLogWriter().info("SNP: Import data rate: " + mbps + " MB / sec");
+ LogWriterUtils.getLogWriter().info("SNP: Imported " + size + " entries (" + bytes + " bytes) in " + elapsed + " ms");
+ LogWriterUtils.getLogWriter().info("SNP: Import entry rate: " + eps + " entries / sec");
+ LogWriterUtils.getLogWriter().info("SNP: Import data rate: " + mbps + " MB / sec");
}
public void setUp() throws Exception {
@@ -160,7 +160,7 @@ public class SnapshotPerformanceDUnitTest extends CacheTestCase {
}
long elapsed = System.currentTimeMillis() - start;
- LogWriterSupport.getLogWriter().info("SNP: loaded " + count + " entries in " + elapsed + " ms");
+ LogWriterUtils.getLogWriter().info("SNP: loaded " + count + " entries in " + elapsed + " ms");
assertEquals(count, region.size());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug35214DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug35214DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug35214DUnitTest.java
index 042f6ee..7f6d29a 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug35214DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug35214DUnitTest.java
@@ -32,8 +32,8 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -97,7 +97,7 @@ public class Bug35214DUnitTest extends CacheTestCase {
//pause(5);
}
else {
- LogWriterSupport.getLogWriter().info("PROFILE CHECK: Found " + numProfiles + " getInitialImage Profiles (OK)");
+ LogWriterUtils.getLogWriter().info("PROFILE CHECK: Found " + numProfiles + " getInitialImage Profiles (OK)");
break;
}
}
@@ -188,7 +188,7 @@ public class Bug35214DUnitTest extends CacheTestCase {
};
af.addCacheListener(cl1);
final Region r1 = createRootRegion("r1", af.create());
- Threads.join(updater, 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(updater, 60 * 1000);
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
return r1.values().size() == 0;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug38741DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug38741DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug38741DUnitTest.java
index 3f2e94c..d1a8bb4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug38741DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/Bug38741DUnitTest.java
@@ -52,7 +52,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.ClientProxyMembershipID;
import com.gemstone.gemfire.internal.cache.tier.sockets.ClientUpdateMessageImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -156,7 +156,7 @@ public class Bug38741DUnitTest extends ClientServerTestCase {
// Setup a client which subscribes to the server region, registers (aka pulls)
// interest in keys which creates an assumed HARegionQueue on the server
// (in the event that the above code didn't already create a HARegion)
- final String serverHostName = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHostName = NetworkUtils.getServerHostName(server.getHost());
client.invoke(new CacheSerializableRunnable("Assert server copy behavior from client") {
public void run2() throws CacheException {
getCache();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheMapTxnDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheMapTxnDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheMapTxnDUnitTest.java
index 72fe9db..eb2c768 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheMapTxnDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheMapTxnDUnitTest.java
@@ -39,8 +39,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
public class CacheMapTxnDUnitTest extends DistributedTestCase{
@@ -148,8 +147,8 @@ public class CacheMapTxnDUnitTest extends DistributedTestCase{
vm0.invoke(CacheMapTxnDUnitTest.class, "miscMethodsOwner");
AsyncInvocation o2 = vm0.invokeAsync(CacheMapTxnDUnitTest.class, "miscMethodsNotOwner");//invoke in same vm but in seperate thread
AsyncInvocation o3 = vm1.invokeAsync(CacheMapTxnDUnitTest.class, "miscMethodsNotOwner");//invoke in another vm
- Threads.join(o2, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(o3, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(o2, 30 * 1000);
+ ThreadUtils.join(o3, 30 * 1000);
if(o2.exceptionOccurred()){
Assert.fail("o2 failed", o2.getException());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheRegionsReliablityStatsCheckDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheRegionsReliablityStatsCheckDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheRegionsReliablityStatsCheckDUnitTest.java
index cd6ea46..1efae19 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheRegionsReliablityStatsCheckDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheRegionsReliablityStatsCheckDUnitTest.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.cache.CachePerfStats;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -102,7 +102,7 @@ public class CacheRegionsReliablityStatsCheckDUnitTest extends CacheTestCase {
{
Properties props = new Properties();
- props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.setProperty(DistributionConfig.ROLES_NAME, rr1);
getSystem(props);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheTestCase.java
index 6bbd810..7a372cb 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheTestCase.java
@@ -59,7 +59,7 @@ import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -426,7 +426,7 @@ public abstract class CacheTestCase extends DistributedTestCase {
try {
cleanDiskDirs();
} catch(Exception e) {
- LogWriterSupport.getLogWriter().error("Error cleaning disk dirs", e);
+ LogWriterUtils.getLogWriter().error("Error cleaning disk dirs", e);
}
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml30DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml30DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml30DUnitTest.java
index 1d244f6..85fa8a4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml30DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml30DUnitTest.java
@@ -62,7 +62,7 @@ import com.gemstone.gemfire.internal.cache.xmlcache.Declarable2;
import com.gemstone.gemfire.internal.cache.xmlcache.RegionAttributesCreation;
import com.gemstone.gemfire.internal.cache.xmlcache.RegionCreation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.util.test.TestUtil;
/**
@@ -596,7 +596,7 @@ public class CacheXml30DUnitTest extends CacheXmlTestCase {
assertTrue("No XML files in " + dirName, xmlFiles.length > 0);
for (int i = 0; i < xmlFiles.length; i++) {
File xmlFile = xmlFiles[i];
- LogWriterSupport.getLogWriter().info("Parsing " + xmlFile);
+ LogWriterUtils.getLogWriter().info("Parsing " + xmlFile);
FileInputStream fis = new FileInputStream(xmlFile);
CacheXmlParser.parse(fis);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml57DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml57DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml57DUnitTest.java
index a87278b..f2eddd6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml57DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/CacheXml57DUnitTest.java
@@ -54,7 +54,7 @@ import com.gemstone.gemfire.internal.cache.xmlcache.RegionAttributesCreation;
import com.gemstone.gemfire.internal.cache.xmlcache.RegionCreation;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -380,7 +380,7 @@ public class CacheXml57DUnitTest extends CacheXml55DUnitTest
});
CacheCreation cache = new CacheCreation();
cache.createPoolFactory()
- .addServer(NetworkSupport.getServerHostName(vm0.getHost()), port)
+ .addServer(NetworkUtils.getServerHostName(vm0.getHost()), port)
.setSubscriptionEnabled(true)
.create("connectionPool");
cache.setDynamicRegionFactoryConfig(new DynamicRegionFactory.Config(null, "connectionPool", false, false));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmCallBkDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmCallBkDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmCallBkDUnitTest.java
index d98d752..3579210 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmCallBkDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmCallBkDUnitTest.java
@@ -37,7 +37,7 @@ import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -69,7 +69,7 @@ public class ClearMultiVmCallBkDUnitTest extends DistributedTestCase{
VM vm1 = host.getVM(1);
vm0.invoke(ClearMultiVmCallBkDUnitTest.class, "createCache");
vm1.invoke(ClearMultiVmCallBkDUnitTest.class, "createCache");
- LogWriterSupport.getLogWriter().fine("Cache created in successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created in successfully");
}
public void preTearDown(){
@@ -132,10 +132,10 @@ public class ClearMultiVmCallBkDUnitTest extends DistributedTestCase{
vm0.invoke(ClearMultiVmCallBkDUnitTest.class, "putMethod", objArr);
}
- LogWriterSupport.getLogWriter().fine("Did all puts successfully");
+ LogWriterUtils.getLogWriter().fine("Did all puts successfully");
vm0.invoke(ClearMultiVmCallBkDUnitTest.class,"clearMethod");
- LogWriterSupport.getLogWriter().fine("Did clear successfully");
+ LogWriterUtils.getLogWriter().fine("Did clear successfully");
while(afterClear){
}
@@ -158,10 +158,10 @@ public class ClearMultiVmCallBkDUnitTest extends DistributedTestCase{
vm0.invoke(ClearMultiVmCallBkDUnitTest.class, "putMethod", objArr);
vm1.invoke(ClearMultiVmCallBkDUnitTest.class, "getMethod", objArr);
}
- LogWriterSupport.getLogWriter().fine("Did all puts successfully");
+ LogWriterUtils.getLogWriter().fine("Did all puts successfully");
//vm0.invoke(ClearMultiVmCallBkDUnitTest.class,"putMethod");
vm1.invoke(ClearMultiVmCallBkDUnitTest.class,"clearMethod");
- LogWriterSupport.getLogWriter().fine("Did clear successfully");
+ LogWriterUtils.getLogWriter().fine("Did clear successfully");
while(afterClear){
}
@@ -231,7 +231,7 @@ public class ClearMultiVmCallBkDUnitTest extends DistributedTestCase{
static class ListenerCallBk extends CacheListenerAdapter {
public void afterRegionClear(RegionEvent event){
- LogWriterSupport.getLogWriter().fine("In afterClear:: CacheListener Callback");
+ LogWriterUtils.getLogWriter().fine("In afterClear:: CacheListener Callback");
try {
int i = 7;
region.put(""+i, "inAfterClear");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmDUnitTest.java
index 4cfe5bf..fe8371b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClearMultiVmDUnitTest.java
@@ -41,9 +41,8 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -215,8 +214,8 @@ public class ClearMultiVmDUnitTest extends DistributedTestCase{
AsyncInvocation as1 = vm0.invokeAsync(ClearMultiVmDUnitTest.class, "firstVM");
AsyncInvocation as2 = vm1.invokeAsync(ClearMultiVmDUnitTest.class, "secondVM");
- Threads.join(as1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(as2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(as1, 30 * 1000);
+ ThreadUtils.join(as2, 30 * 1000);
if(as1.exceptionOccurred()){
Assert.fail("as1 failed", as1.getException());
@@ -331,7 +330,7 @@ public class ClearMultiVmDUnitTest extends DistributedTestCase{
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
if(async1.exceptionOccurred()){
Assert.fail("async1 failed", async1.getException());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientMembershipDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientMembershipDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientMembershipDUnitTest.java
index 221d285..074441e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientMembershipDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientMembershipDUnitTest.java
@@ -56,7 +56,7 @@ import com.gemstone.gemfire.management.membership.ClientMembershipListener;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -124,7 +124,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
public void testConnectionTimeout() throws Exception {
IgnoredException.addIgnoredException("failed accepting client connection");
final Host host = Host.getHost(0);
- final String hostName = NetworkSupport.getServerHostName(host);
+ final String hostName = NetworkUtils.getServerHostName(host);
final VM vm0 = host.getVM(0);
System.setProperty(AcceptorImpl.ACCEPT_TIMEOUT_PROPERTY_NAME, "1000");
try {
@@ -135,7 +135,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
SerializableRunnable createMeanSocket = new CacheSerializableRunnable("Connect to server with socket") {
public void run2() throws CacheException {
getCache(); // create a cache so we have stats
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("connecting to cache server with socket");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("connecting to cache server with socket");
try {
InetAddress addr = InetAddress.getByName(hostName);
meanSocket = new Socket(addr, port);
@@ -147,7 +147,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
};
SerializableRunnable closeMeanSocket = new CacheSerializableRunnable("close mean socket") {
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("closing mean socket");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("closing mean socket");
try {
meanSocket.close();
}
@@ -158,28 +158,28 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertEquals(0, getAcceptsInProgress());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("creating mean socket");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("creating mean socket");
vm0.invoke(createMeanSocket);
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("waiting to see it connect on server");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("waiting to see it connect on server");
waitForAcceptsInProgressToBe(1);
} finally {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("closing mean socket");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("closing mean socket");
vm0.invoke(closeMeanSocket);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("waiting to see accept to go away on server");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("waiting to see accept to go away on server");
waitForAcceptsInProgressToBe(0);
// now try it without a close. Server should timeout the mean connect
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("creating mean socket 2");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("creating mean socket 2");
vm0.invoke(createMeanSocket);
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("waiting to see it connect on server 2");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("waiting to see it connect on server 2");
waitForAcceptsInProgressToBe(1);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("waiting to see accept to go away on server without us closing");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("waiting to see accept to go away on server without us closing");
waitForAcceptsInProgressToBe(0);
} finally {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("closing mean socket 2");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("closing mean socket 2");
vm0.invoke(closeMeanSocket);
}
@@ -739,7 +739,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
// create and register ClientMembershipListener in controller vm...
ClientMembershipListener listener = new ClientMembershipListener() {
public synchronized void memberJoined(ClientMembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] memberJoined: " + event);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] memberJoined: " + event);
fired[JOINED] = true;
member[JOINED] = event.getMember();
memberId[JOINED] = event.getMemberId();
@@ -747,11 +747,11 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
notifyAll();
}
public synchronized void memberLeft(ClientMembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] memberLeft: " + event);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] memberLeft: " + event);
// fail("Please update testClientMembershipEventsInClient to handle memberLeft for BridgeServer.");
}
public synchronized void memberCrashed(ClientMembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] memberCrashed: " + event);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] memberCrashed: " + event);
fired[CRASHED] = true;
member[CRASHED] = event.getMember();
memberId[CRASHED] = event.getMemberId();
@@ -769,7 +769,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
vm0.invoke(new CacheSerializableRunnable("Create BridgeServer") {
public void run2() throws CacheException {
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -796,9 +796,9 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
String serverMemberId = (String) vm0.invoke(ClientMembershipDUnitTest.class,
"getMemberId");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] serverMember=" + serverMember);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] serverMember=" + serverMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] serverMemberId=" + serverMemberId);
assertFalse(fired[JOINED]);
assertNull(member[JOINED]);
@@ -814,7 +814,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertFalse(isClient[CRASHED]);
// sanity check...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] sanity check");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] sanity check");
DistributedMember test = new TestDistributedMember("test");
InternalClientMembership.notifyJoined(test, SERVER);
synchronized(listener) {
@@ -838,7 +838,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
resetArraysForTesting(fired, member, memberId, isClient);
// create bridge client in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] create bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
@@ -848,7 +848,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
getCache();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(Host.getHost(0)), ports, true, -1, -1, null);
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(Host.getHost(0)), ports, true, -1, -1, null);
createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
}
@@ -861,7 +861,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] assert client detected server join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] assert client detected server join");
// first check the getCurrentServers() result
ClientCache clientCache = (ClientCache)getCache();
@@ -890,7 +890,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
vm0.invoke(new SerializableRunnable("Stop BridgeServer") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] Stop BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] Stop BridgeServer");
stopBridgeServers(getCache());
}
});
@@ -900,7 +900,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] assert client detected server departure");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] assert client detected server departure");
assertFalse(fired[JOINED]);
assertNull(member[JOINED]);
assertNull(memberId[JOINED]);
@@ -921,7 +921,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
vm0.invoke(new CacheSerializableRunnable("Recreate BridgeServer") {
public void run2() throws CacheException {
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] restarting BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] restarting BridgeServer");
startBridgeServer(ports[0]);
}
catch(IOException e) {
@@ -936,7 +936,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInClient] assert client detected server recovery");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInClient] assert client detected server recovery");
assertTrue(fired[JOINED]);
assertNotNull(member[JOINED]);
assertNotNull(memberId[JOINED]);
@@ -965,7 +965,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
// create and register ClientMembershipListener in controller vm...
ClientMembershipListener listener = new ClientMembershipListener() {
public synchronized void memberJoined(ClientMembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] memberJoined: " + event);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] memberJoined: " + event);
fired[JOINED] = true;
member[JOINED] = event.getMember();
memberId[JOINED] = event.getMemberId();
@@ -974,7 +974,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertFalse(fired[LEFT] || fired[CRASHED]);
}
public synchronized void memberLeft(ClientMembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] memberLeft: " + event);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] memberLeft: " + event);
fired[LEFT] = true;
member[LEFT] = event.getMember();
memberId[LEFT] = event.getMemberId();
@@ -983,7 +983,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertFalse(fired[JOINED] || fired[CRASHED]);
}
public synchronized void memberCrashed(ClientMembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] memberCrashed: " + event);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] memberCrashed: " + event);
fired[CRASHED] = true;
member[CRASHED] = event.getMember();
memberId[CRASHED] = event.getMemberId();
@@ -999,7 +999,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
final int[] ports = new int[1];
// create BridgeServer in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1012,9 +1012,9 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
String serverMemberId = getMemberId();
DistributedMember serverMember = getDistributedMember();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] serverMemberId=" + serverMemberId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] serverMember=" + serverMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] serverMember=" + serverMember);
assertFalse(fired[JOINED]);
assertNull(member[JOINED]);
@@ -1030,7 +1030,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertFalse(isClient[CRASHED]);
// sanity check...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] sanity check");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] sanity check");
DistributedMember test = new TestDistributedMember("test");
InternalClientMembership.notifyJoined(test, CLIENT);
synchronized(listener) {
@@ -1056,14 +1056,14 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
SerializableRunnable createConnectionPool =
new CacheSerializableRunnable("Create connectionPool") {
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] create bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
getSystem(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, true, -1, 2, null);
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, true, -1, 2, null);
createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
}
@@ -1082,7 +1082,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client join");
assertTrue(fired[JOINED]);
assertEquals(member[JOINED] + " should equal " + clientMember,
clientMember, member[JOINED]);
@@ -1103,7 +1103,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
vm0.invoke(new SerializableRunnable("Stop bridge client") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] Stop bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] Stop bridge client");
getRootRegion().getSubregion(name).close();
Map m = PoolManager.getAll();
Iterator mit = m.values().iterator();
@@ -1120,7 +1120,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client left");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client left");
assertFalse(fired[JOINED]);
assertNull(member[JOINED]);
assertNull(memberId[JOINED]);
@@ -1146,7 +1146,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client re-join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client re-join");
assertTrue(fired[JOINED]);
assertEquals(clientMember, member[JOINED]);
assertEquals(clientMemberId, memberId[JOINED]);
@@ -1167,7 +1167,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
try {
vm0.invoke(new SerializableRunnable("Stop bridge client") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] Stop bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] Stop bridge client");
getRootRegion().getSubregion(name).close();
Map m = PoolManager.getAll();
Iterator mit = m.values().iterator();
@@ -1184,7 +1184,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client crashed");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testClientMembershipEventsInServer] assert server detected client crashed");
assertFalse(fired[JOINED]);
assertNull(member[JOINED]);
assertNull(memberId[JOINED]);
@@ -1307,7 +1307,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
IgnoredException.addIgnoredException("ConnectException");
// create BridgeServer in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedClients] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedClients] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1319,14 +1319,14 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertTrue(ports[0] != 0);
String serverMemberId = getMemberId();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedClients] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedClients] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedClients] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedClients] serverMemberId=" + serverMemberId);
final Host host = Host.getHost(0);
SerializableRunnable createPool =
new CacheSerializableRunnable("Create connection pool") {
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedClients] create bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedClients] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
@@ -1336,7 +1336,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
getSystem(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- Pool p = ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, true, -1, -1, null);
+ Pool p = ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, true, -1, -1, null);
createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
assertTrue(p.getServers().size() > 0);
@@ -1380,10 +1380,10 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertEquals(clientMemberIds.size(), connectedClients.size());
for (Iterator iter = connectedClients.keySet().iterator(); iter.hasNext();) {
String connectedClient = (String)iter.next();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedClients] checking for client " + connectedClient);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedClients] checking for client " + connectedClient);
assertTrue(clientMemberIds.contains(connectedClient));
Object[] result = (Object[])connectedClients.get(connectedClient);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedClients] result: " +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedClients] result: " +
(result==null? "none"
: String.valueOf(result[0])+"; connections="+result[1]));
}
@@ -1404,7 +1404,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
vm.invoke(new CacheSerializableRunnable("Create bridge server") {
public void run2() throws CacheException {
// create BridgeServer in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedServers] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedServers] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1417,15 +1417,15 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
testGetConnectedServers_port = startBridgeServer(0);
}
catch (IOException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().error("startBridgeServer threw IOException", e);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
fail("startBridgeServer threw IOException " + e.getMessage());
}
assertTrue(testGetConnectedServers_port != 0);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedServers] port=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedServers] port=" +
ports[whichVM]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedServers] serverMemberId=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedServers] serverMemberId=" +
getDistributedMember());
}
});
@@ -1434,7 +1434,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertTrue(ports[whichVM] != 0);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedServers] create bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedServers] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
@@ -1445,10 +1445,10 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
factory.setScope(Scope.LOCAL);
for (int i = 0; i < ports.length; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedServers] creating connectionpool for " +
- NetworkSupport.getServerHostName(host) + " " + ports[i]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedServers] creating connectionpool for " +
+ NetworkUtils.getServerHostName(host) + " " + ports[i]);
int[] thisServerPorts = new int[] { ports[i] };
- ClientServerTestCase.configureConnectionPoolWithName(factory, NetworkSupport.getServerHostName(host), thisServerPorts, false, -1, -1, null,"pooly"+i);
+ ClientServerTestCase.configureConnectionPoolWithName(factory, NetworkUtils.getServerHostName(host), thisServerPorts, false, -1, -1, null,"pooly"+i);
Region region = createRegion(name+"_"+i, factory.create());
assertNotNull(getRootRegion().getSubregion(name+"_"+i));
region.get("KEY-1");
@@ -1487,7 +1487,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertEquals(host.getVMCount(), connectedServers.size());
for (Iterator iter = connectedServers.keySet().iterator(); iter.hasNext();) {
String connectedServer = (String) iter.next();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetConnectedServers] value for connectedServer: " +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetConnectedServers] value for connectedServer: " +
connectedServers.get(connectedServer));
}
}
@@ -1512,7 +1512,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
vm.invoke(new CacheSerializableRunnable("Create bridge server") {
public void run2() throws CacheException {
// create BridgeServer in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetNotifiedClients] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetNotifiedClients] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
Region region = createRegion(name, factory.create());
@@ -1524,15 +1524,15 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
testGetNotifiedClients_port = startBridgeServer(0);
}
catch (IOException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().error("startBridgeServer threw IOException", e);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
fail("startBridgeServer threw IOException " + e.getMessage());
}
assertTrue(testGetNotifiedClients_port != 0);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetNotifiedClients] port=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetNotifiedClients] port=" +
ports[whichVM]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetNotifiedClients] serverMemberId=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetNotifiedClients] serverMemberId=" +
getMemberId());
}
});
@@ -1541,7 +1541,7 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
assertTrue(ports[whichVM] != 0);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetNotifiedClients] create bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetNotifiedClients] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
@@ -1551,8 +1551,8 @@ public class ClientMembershipDUnitTest extends ClientServerTestCase {
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testGetNotifiedClients] creating connection pool");
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, true, -1, -1, null);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testGetNotifiedClients] creating connection pool");
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, true, -1, -1, null);
Region region = createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
region.registerInterest("KEY-1");
[22/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionTestBase.java
index 46eb703..6726747 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionTestBase.java
@@ -46,7 +46,7 @@ import com.gemstone.gemfire.internal.cache.control.MemoryThresholds.MemoryState;
import com.gemstone.gemfire.internal.cache.lru.HeapEvictor;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -281,11 +281,11 @@ public class EvictionTestBase extends CacheTestCase {
ds = getSystem(props);
cache = CacheFactory.create(ds);
assertNotNull(cache);
- LogWriterSupport.getLogWriter().info("cache= " + cache);
- LogWriterSupport.getLogWriter().info("cache closed= " + cache.isClosed());
+ LogWriterUtils.getLogWriter().info("cache= " + cache);
+ LogWriterUtils.getLogWriter().info("cache closed= " + cache.isClosed());
cache.getResourceManager().setEvictionHeapPercentage(85);
- LogWriterSupport.getLogWriter().info("eviction= "+cache.getResourceManager().getEvictionHeapPercentage());
- LogWriterSupport.getLogWriter().info("critical= "+cache.getResourceManager().getCriticalHeapPercentage());
+ LogWriterUtils.getLogWriter().info("eviction= "+cache.getResourceManager().getEvictionHeapPercentage());
+ LogWriterUtils.getLogWriter().info("critical= "+cache.getResourceManager().getCriticalHeapPercentage());
}
catch (Exception e) {
Assert.fail("Failed while creating the cache", e);
@@ -366,7 +366,7 @@ public class EvictionTestBase extends CacheTestCase {
region = cache.createRegion(regionName, factory.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info("Partitioned Region created Successfully :" + region);
+ LogWriterUtils.getLogWriter().info("Partitioned Region created Successfully :" + region);
}
public static void putData(final String regionName, final int noOfElememts,
@@ -390,7 +390,7 @@ public class EvictionTestBase extends CacheTestCase {
final Region pr = cache.getRegion("DR1");
for (int counter = 1; counter <= noOfElememts; counter++) {
pr.put(new Integer(counter), new byte[sizeOfElement * 1024 * 1024]);
- LogWriterSupport.getLogWriter().info("Amar put data element no->" + counter);
+ LogWriterUtils.getLogWriter().info("Amar put data element no->" + counter);
}
}
});
@@ -413,7 +413,7 @@ public class EvictionTestBase extends CacheTestCase {
if (bucketRegion == null) {
continue;
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"FINAL bucket= " + bucketRegion.getFullPath() + "size= "
+ bucketRegion.size());
}
@@ -428,7 +428,7 @@ public class EvictionTestBase extends CacheTestCase {
};
long evictionsInVM1 = (Long)dataStore1.invoke(validate);
long evictionsInVM2 = (Long)dataStore2.invoke(validate);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"EEE evicitons = " + noOfEvictions + " "
+ (evictionsInVM1 + evictionsInVM2));
assertEquals(noOfEvictions, (evictionsInVM1 + evictionsInVM2));
@@ -479,7 +479,7 @@ public class EvictionTestBase extends CacheTestCase {
while(itr.hasNext())
{
BucketRegion br=(BucketRegion)itr.next();
- LogWriterSupport.getLogWriter().info("Print "+ br.size());
+ LogWriterUtils.getLogWriter().info("Print "+ br.size());
}
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/FixedPRSinglehopDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/FixedPRSinglehopDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/FixedPRSinglehopDUnitTest.java
index 8c9c82b..86376d9 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/FixedPRSinglehopDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/FixedPRSinglehopDUnitTest.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.internal.cache.partitioned.fixed.FixedPartitioningTe
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -284,7 +284,7 @@ public class FixedPRSinglehopDUnitTest extends CacheTestCase {
VM server4 = host.getVM(3);
Boolean simpleFPR = false;
final int portLocator = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String hostLocator = NetworkSupport.getServerHostName(server1.getHost());
+ final String hostLocator = NetworkUtils.getServerHostName(server1.getHost());
final String locator = hostLocator + "[" + portLocator + "]";
server3.invoke(FixedPRSinglehopDUnitTest.class,
"startLocatorInVM", new Object[] { portLocator });
@@ -388,7 +388,7 @@ public class FixedPRSinglehopDUnitTest extends CacheTestCase {
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
}
@@ -434,7 +434,7 @@ public class FixedPRSinglehopDUnitTest extends CacheTestCase {
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
}
@@ -488,7 +488,7 @@ public class FixedPRSinglehopDUnitTest extends CacheTestCase {
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -605,7 +605,7 @@ public class FixedPRSinglehopDUnitTest extends CacheTestCase {
RegionAttributes attrs = factory.create();
region = cache.createRegion(PR_NAME, attrs);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + PR_NAME + " created Successfully :"
+ region.toString());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GIIDeltaDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GIIDeltaDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GIIDeltaDUnitTest.java
index 14ad28b..1e714f1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GIIDeltaDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GIIDeltaDUnitTest.java
@@ -59,7 +59,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -2030,8 +2030,8 @@ public class GIIDeltaDUnitTest extends CacheTestCase {
RegionFactory f = getCache().createRegionFactory(getRegionAttributes());
// CCRegion = (LocalRegion)f.create(REGION_NAME);
LocalRegion lr = (LocalRegion)f.create(REGION_NAME);
- LogWriterSupport.getLogWriter().info("In createDistributedRegion, using hydra.getLogWriter()");
- LogWriterSupport.getLogWriter().fine("Unfinished Op limit="+InitialImageOperation.MAXIMUM_UNFINISHED_OPERATIONS);
+ LogWriterUtils.getLogWriter().info("In createDistributedRegion, using hydra.getLogWriter()");
+ LogWriterUtils.getLogWriter().fine("Unfinished Op limit="+InitialImageOperation.MAXIMUM_UNFINISHED_OPERATIONS);
} catch (CacheException ex) {
Assert.fail("While creating region", ex);
}
@@ -2099,9 +2099,9 @@ public class GIIDeltaDUnitTest extends CacheTestCase {
protected void removeSystemPropertiesInVM(VM vm, final String prop) {
SerializableRunnable change = new SerializableRunnable() {
public void run() {
- LogWriterSupport.getLogWriter().info("Current prop setting: "+prop+"="+System.getProperty(prop));
+ LogWriterUtils.getLogWriter().info("Current prop setting: "+prop+"="+System.getProperty(prop));
System.getProperties().remove(prop);
- LogWriterSupport.getLogWriter().info(prop+"="+System.getProperty(prop));
+ LogWriterUtils.getLogWriter().info(prop+"="+System.getProperty(prop));
}
};
vm.invoke(change);
@@ -2134,7 +2134,7 @@ public class GIIDeltaDUnitTest extends CacheTestCase {
DiskStoreID dsid0 = getMemberID(vm0);
DiskStoreID dsid1 = getMemberID(vm1);
int compare = dsid0.compareTo(dsid1);
- LogWriterSupport.getLogWriter().info("Before assignVMsToPandR, dsid0 is "+dsid0+",dsid1 is "+dsid1+",compare="+compare);
+ LogWriterUtils.getLogWriter().info("Before assignVMsToPandR, dsid0 is "+dsid0+",dsid1 is "+dsid1+",compare="+compare);
if (compare > 0) {
P = vm0;
R = vm1;
@@ -2142,7 +2142,7 @@ public class GIIDeltaDUnitTest extends CacheTestCase {
P = vm1;
R = vm0;
}
- LogWriterSupport.getLogWriter().info("After assignVMsToPandR, P is "+P.getPid()+"; R is "+R.getPid()+" for region "+REGION_NAME);
+ LogWriterUtils.getLogWriter().info("After assignVMsToPandR, P is "+P.getPid()+"; R is "+R.getPid()+" for region "+REGION_NAME);
}
private DiskStoreID getMemberID(VM vm) {
@@ -2196,7 +2196,7 @@ public class GIIDeltaDUnitTest extends CacheTestCase {
for (long i:exceptionList) {
exceptionListVerified = !rvv.contains(member, i);
if (!exceptionListVerified) {
- LogWriterSupport.getLogWriter().finer("DeltaGII:missing exception "+i+":"+rvv);
+ LogWriterUtils.getLogWriter().finer("DeltaGII:missing exception "+i+":"+rvv);
break;
}
}
@@ -2205,7 +2205,7 @@ public class GIIDeltaDUnitTest extends CacheTestCase {
for (long i = 1; i<=regionversion; i++) {
if (!rvv.contains(member, i)) {
exceptionListVerified = false;
- LogWriterSupport.getLogWriter().finer("DeltaGII:unexpected exception "+i);
+ LogWriterUtils.getLogWriter().finer("DeltaGII:unexpected exception "+i);
break;
}
}
@@ -2221,8 +2221,8 @@ public class GIIDeltaDUnitTest extends CacheTestCase {
long gcversion = getRegionVersionForMember(rvv, member, true);
boolean exceptionListVerified = verifyExceptionList(member, regionversion, rvv, exceptionList);
- LogWriterSupport.getLogWriter().info("DeltaGII:expected:"+expectedRegionVersion+":"+expectedGCVersion);
- LogWriterSupport.getLogWriter().info("DeltaGII:actual:"+regionversion+":"+gcversion+":"+exceptionListVerified+":"+rvv);
+ LogWriterUtils.getLogWriter().info("DeltaGII:expected:"+expectedRegionVersion+":"+expectedGCVersion);
+ LogWriterUtils.getLogWriter().info("DeltaGII:actual:"+regionversion+":"+gcversion+":"+exceptionListVerified+":"+rvv);
boolean match = true;
if (expectedRegionVersion != -1) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GridAdvisorDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GridAdvisorDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GridAdvisorDUnitTest.java
index 5569670..f12333f 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GridAdvisorDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/GridAdvisorDUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -81,14 +81,14 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
final Keeper bsKeeper4 = freeTCPPorts.get(5);
final int bsPort4 = bsKeeper4.getPort();
- final String host0 = NetworkSupport.getServerHostName(host);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]" + ","
+ host0 + "[" + port2 + "]";
final Properties dsProps = new Properties();
dsProps.setProperty("locators", locators);
dsProps.setProperty("mcast-port", "0");
- dsProps.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ dsProps.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
dsProps.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
keeper1.release();
@@ -126,7 +126,7 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty("mcast-port", "0");
props.setProperty("locators", locators);
- dsProps.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ dsProps.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
CacheFactory.create(DistributedSystem.connect(props));
}
};
@@ -286,7 +286,7 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
DistributionAdvisee advisee = (DistributionAdvisee)bslist.get(i);
CacheServerAdvisor bsa = (CacheServerAdvisor)advisee.getDistributionAdvisor();
List others = bsa.fetchBridgeServers();
- LogWriterSupport.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
+ LogWriterUtils.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
assertEquals(3, others.size());
others = bsa.fetchControllers();
assertEquals(2, others.size());
@@ -314,7 +314,7 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
DistributionAdvisee advisee = (DistributionAdvisee)bslist.get(i);
CacheServerAdvisor bsa = (CacheServerAdvisor)advisee.getDistributionAdvisor();
List others = bsa.fetchBridgeServers();
- LogWriterSupport.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
+ LogWriterUtils.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
assertEquals(3, others.size());
others = bsa.fetchControllers();
assertEquals(2, others.size());
@@ -590,14 +590,14 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
final Keeper bsKeeper4 = freeTCPPorts.get(5);
final int bsPort4 = bsKeeper4.getPort();
- final String host0 = NetworkSupport.getServerHostName(host);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]" + ","
+ host0 + "[" + port2 + "]";
final Properties dsProps = new Properties();
dsProps.setProperty("locators", locators);
dsProps.setProperty("mcast-port", "0");
- dsProps.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ dsProps.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
dsProps.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
keeper1.release();
@@ -635,7 +635,7 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
props.setProperty("mcast-port", "0");
props.setProperty("locators", locators);
props.setProperty("groups", "bs1Group1, bs1Group2");
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
CacheFactory.create(DistributedSystem.connect(props));
}
});
@@ -645,7 +645,7 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
props.setProperty("mcast-port", "0");
props.setProperty("locators", locators);
props.setProperty("groups", "bs2Group1, bs2Group2");
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
CacheFactory.create(DistributedSystem.connect(props));
}
});
@@ -800,7 +800,7 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
DistributionAdvisee advisee = (DistributionAdvisee)bslist.get(i);
CacheServerAdvisor bsa = (CacheServerAdvisor)advisee.getDistributionAdvisor();
List others = bsa.fetchBridgeServers();
- LogWriterSupport.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
+ LogWriterUtils.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
assertEquals(3, others.size());
others = bsa.fetchControllers();
assertEquals(2, others.size());
@@ -828,7 +828,7 @@ public class GridAdvisorDUnitTest extends DistributedTestCase {
DistributionAdvisee advisee = (DistributionAdvisee)bslist.get(i);
CacheServerAdvisor bsa = (CacheServerAdvisor)advisee.getDistributionAdvisor();
List others = bsa.fetchBridgeServers();
- LogWriterSupport.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
+ LogWriterUtils.getLogWriter().info("found these bridgeservers in " + advisee + ": " + others);
assertEquals(3, others.size());
others = bsa.fetchControllers();
assertEquals(2, others.size());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HABug36773DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HABug36773DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HABug36773DUnitTest.java
index 29ac4a8..25672cf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HABug36773DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HABug36773DUnitTest.java
@@ -41,8 +41,8 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -159,7 +159,7 @@ public class HABug36773DUnitTest extends DistributedTestCase
public void testDummyForBug36773()
{
- LogWriterSupport.getLogWriter().info(" This is the dummy test for the Bug 36773");
+ LogWriterUtils.getLogWriter().info(" This is the dummy test for the Bug 36773");
}
@@ -174,7 +174,7 @@ public class HABug36773DUnitTest extends DistributedTestCase
{
Region region = cache.getRegion(Region.SEPARATOR + REGION_NAME);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info("Size of the region " + region.size());
+ LogWriterUtils.getLogWriter().info("Size of the region " + region.size());
assertEquals(size, region.size());
}
};
@@ -283,7 +283,7 @@ public class HABug36773DUnitTest extends DistributedTestCase
new HABug36773DUnitTest("temp").createCache(props);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getIPLiteral(), new int[] {PORT1,PORT2}, true, -1, 2, null);
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getIPLiteral(), new int[] {PORT1,PORT2}, true, -1, 2, null);
RegionAttributes attrs = factory.create();
cache.createRegion(REGION_NAME, attrs);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HAOverflowMemObjectSizerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HAOverflowMemObjectSizerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HAOverflowMemObjectSizerDUnitTest.java
index 80e26ec..6b07ec7 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HAOverflowMemObjectSizerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/HAOverflowMemObjectSizerDUnitTest.java
@@ -40,7 +40,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.ClientUpdateMessageImpl;
import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -191,7 +191,7 @@ public class HAOverflowMemObjectSizerDUnitTest extends DistributedTestCase {
client.invoke(HAOverflowMemObjectSizerDUnitTest.class,
"createCacheClient",
new Object[] { port1,
- NetworkSupport.getServerHostName(client.getHost()) });
+ NetworkUtils.getServerHostName(client.getHost()) });
serverVM.invoke(HAOverflowMemObjectSizerDUnitTest.class, "performPut",
new Object[] { new Long(0L), new Long(100L) });
@@ -217,7 +217,7 @@ public class HAOverflowMemObjectSizerDUnitTest extends DistributedTestCase {
client.invoke(HAOverflowMemObjectSizerDUnitTest.class,
"createCacheClient",
new Object[] { port2,
- NetworkSupport.getServerHostName(client.getHost()) });
+ NetworkUtils.getServerHostName(client.getHost()) });
serverVM.invoke(HAOverflowMemObjectSizerDUnitTest.class, "performPut",
new Object[] { new Long(101L), new Long(200L) });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/IncrementalBackupDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/IncrementalBackupDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/IncrementalBackupDUnitTest.java
index c5af9e4..c8f0933 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/IncrementalBackupDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/IncrementalBackupDUnitTest.java
@@ -51,7 +51,7 @@ import com.gemstone.gemfire.internal.cache.persistence.BackupManager;
import com.gemstone.gemfire.internal.util.IOUtils;
import com.gemstone.gemfire.internal.util.TransformUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -90,7 +90,7 @@ public class IncrementalBackupDUnitTest extends CacheTestCase {
private final SerializableRunnable createRegions = new SerializableRunnable() {
@Override
public void run() {
- Cache cache = getCache(new CacheFactory().set("log-level", LogWriterSupport.getDUnitLogLevel()));
+ Cache cache = getCache(new CacheFactory().set("log-level", LogWriterUtils.getDUnitLogLevel()));
cache.createDiskStoreFactory().setDiskDirs(getDiskDirs()).create("fooStore");
cache.createDiskStoreFactory().setDiskDirs(getDiskDirs()).create("barStore");
getRegionFactory(cache).setDiskStoreName("fooStore").create("fooRegion");
@@ -127,7 +127,7 @@ public class IncrementalBackupDUnitTest extends CacheTestCase {
* @param message a message to log.
*/
private void log(String message) {
- LogWriterSupport.getLogWriter().info("[IncrementalBackupDUnitTest] " + message);
+ LogWriterUtils.getLogWriter().info("[IncrementalBackupDUnitTest] " + message);
}
/**
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapClearGIIDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapClearGIIDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapClearGIIDUnitTest.java
index a4f8d6d..3aeceec 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapClearGIIDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapClearGIIDUnitTest.java
@@ -35,9 +35,9 @@ import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -142,7 +142,7 @@ public class MapClearGIIDUnitTest extends CacheTestCase {
region = new MapClearGIIDUnitTest("dumb object to get cache").getCache().createRegion("map", attr);
// region = region.createSubregion("map",attr);
- LogWriterSupport.getLogWriter().info("Region in VM0 created ");
+ LogWriterUtils.getLogWriter().info("Region in VM0 created ");
}
/*
public static void closeCache() {
@@ -218,7 +218,7 @@ public class MapClearGIIDUnitTest extends CacheTestCase {
}
}
});
- LogWriterSupport.getLogWriter().info("Cache created in VM1 successfully");
+ LogWriterUtils.getLogWriter().info("Cache created in VM1 successfully");
try {
AsyncInvocation asyncGII = vm0.invokeAsync(MapClearGIIDUnitTest.class,
"createRegionInVm0");
@@ -240,7 +240,7 @@ public class MapClearGIIDUnitTest extends CacheTestCase {
// now that the gii has received some entries do the clear
vm1.invoke(MapClearGIIDUnitTest.class, "clearRegionInVm1");
// wait for GII to complete
- Threads.join(asyncGII, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncGII, 30 * 1000);
if (asyncGII.exceptionOccurred()) {
Throwable t = asyncGII.getException();
Assert.fail("createRegionInVM0 failed", t);
@@ -271,13 +271,13 @@ public class MapClearGIIDUnitTest extends CacheTestCase {
public static class CacheObserverImpl extends CacheObserverAdapter {
public void afterRegionClear(RegionEvent event) {
- LogWriterSupport.getLogWriter().info("**********Received clear event in VM0 . ");
+ LogWriterUtils.getLogWriter().info("**********Received clear event in VM0 . ");
Region rgn = event.getRegion();
wasGIIInProgressDuringClear = ((LocalRegion) rgn).getImageState()
.wasRegionClearedDuringGII();
InitialImageOperation.slowImageProcessing = 0;
InitialImageOperation.slowImageSleeps = 0;
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"wasGIIInProgressDuringClear when clear event was received= "
+ wasGIIInProgressDuringClear);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapInterface2JUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapInterface2JUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapInterface2JUnitTest.java
index 4704955..0129ec6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapInterface2JUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/MapInterface2JUnitTest.java
@@ -41,7 +41,7 @@ import com.gemstone.gemfire.cache.query.CacheUtils;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
import com.gemstone.gemfire.internal.util.StopWatch;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -221,7 +221,7 @@ public class MapInterface2JUnitTest {
callbackSync.notify();
}
}
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
}
@@ -274,7 +274,7 @@ public class MapInterface2JUnitTest {
}catch (Exception cwe) {
fail("The test experienced exception "+cwe);
}
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/NetSearchMessagingDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/NetSearchMessagingDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/NetSearchMessagingDUnitTest.java
index 4243b7a..f49a4c6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/NetSearchMessagingDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/NetSearchMessagingDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.distributed.internal.DistributionMessageObserver;
import com.gemstone.gemfire.internal.cache.SearchLoadAndWriteProcessor.NetSearchRequestMessage;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -196,7 +196,7 @@ public class NetSearchMessagingDUnitTest extends CacheTestCase {
LocalRegion region = (LocalRegion)cache.getRegion("region");
RegionEntry re = region.getRegionEntry("a");
Object o = re.getValueInVM(null);
- LogWriterSupport.getLogWriter().info("key a="+o);;
+ LogWriterUtils.getLogWriter().info("key a="+o);;
return o == null || o == Token.NOT_AVAILABLE;
}
};
@@ -218,7 +218,7 @@ public class NetSearchMessagingDUnitTest extends CacheTestCase {
for (String key: keys) {
RegionEntry re = region.getRegionEntry(key);
Object o = re.getValueInVM(null);
- LogWriterSupport.getLogWriter().info("key " + key + "=" + o);
+ LogWriterUtils.getLogWriter().info("key " + key + "=" + o);
assertTrue("expected key " + key + " to not be evicted",
(o != null) && (o != Token.NOT_AVAILABLE));
}
@@ -336,9 +336,9 @@ public class NetSearchMessagingDUnitTest extends CacheTestCase {
public Object call() {
Cache cache = getCache();
Region region = cache.getRegion("region");
- LogWriterSupport.getLogWriter().info("putting key="+key+"="+value);
+ LogWriterUtils.getLogWriter().info("putting key="+key+"="+value);
Object result = region.put(key, value);
- LogWriterSupport.getLogWriter().info("done putting key="+key);
+ LogWriterUtils.getLogWriter().info("done putting key="+key);
return result;
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionDUnitTest.java
index c344f2a..708b983 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionDUnitTest.java
@@ -27,7 +27,7 @@ import com.gemstone.gemfire.internal.cache.control.InternalResourceManager.Resou
import com.gemstone.gemfire.internal.cache.lru.HeapEvictor;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -74,12 +74,12 @@ public class OffHeapEvictionDUnitTest extends EvictionDUnitTest {
ds = getSystem(getDistributedSystemProperties());
cache = CacheFactory.create(ds);
assertNotNull(cache);
- LogWriterSupport.getLogWriter().info("cache= " + cache);
- LogWriterSupport.getLogWriter().info("cache closed= " + cache.isClosed());
+ LogWriterUtils.getLogWriter().info("cache= " + cache);
+ LogWriterUtils.getLogWriter().info("cache closed= " + cache.isClosed());
cache.getResourceManager().setEvictionOffHeapPercentage(85);
((GemFireCacheImpl) cache).getResourceManager().getOffHeapMonitor().stopMonitoring(true);
- LogWriterSupport.getLogWriter().info("eviction= "+cache.getResourceManager().getEvictionOffHeapPercentage());
- LogWriterSupport.getLogWriter().info("critical= "+cache.getResourceManager().getCriticalOffHeapPercentage());
+ LogWriterUtils.getLogWriter().info("eviction= "+cache.getResourceManager().getEvictionOffHeapPercentage());
+ LogWriterUtils.getLogWriter().info("critical= "+cache.getResourceManager().getCriticalOffHeapPercentage());
}
catch (Exception e) {
Assert.fail("Failed while creating the cache", e);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionStatsDUnitTest.java
index 2851411..f574e99 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OffHeapEvictionStatsDUnitTest.java
@@ -23,7 +23,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
/**
@@ -70,11 +70,11 @@ public class OffHeapEvictionStatsDUnitTest extends EvictionStatsDUnitTest {
ds = getSystem(getDistributedSystemProperties());
cache = CacheFactory.create(ds);
assertNotNull(cache);
- LogWriterSupport.getLogWriter().info("cache= " + cache);
- LogWriterSupport.getLogWriter().info("cache closed= " + cache.isClosed());
+ LogWriterUtils.getLogWriter().info("cache= " + cache);
+ LogWriterUtils.getLogWriter().info("cache closed= " + cache.isClosed());
cache.getResourceManager().setEvictionOffHeapPercentage(20);
- LogWriterSupport.getLogWriter().info("eviction= "+cache.getResourceManager().getEvictionOffHeapPercentage());
- LogWriterSupport.getLogWriter().info("critical= "+cache.getResourceManager().getCriticalOffHeapPercentage());
+ LogWriterUtils.getLogWriter().info("eviction= "+cache.getResourceManager().getEvictionOffHeapPercentage());
+ LogWriterUtils.getLogWriter().info("critical= "+cache.getResourceManager().getCriticalOffHeapPercentage());
}
catch (Exception e) {
Assert.fail("Failed while creating the cache", e);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OplogJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OplogJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OplogJUnitTest.java
index bcc20c3..1d9213e 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OplogJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/OplogJUnitTest.java
@@ -57,7 +57,7 @@ import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.util.CacheWriterAdapter;
import com.gemstone.gemfire.internal.InternalDataSerializer;
import com.gemstone.gemfire.internal.cache.Oplog.OPLOG_TYPE;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@@ -1678,7 +1678,7 @@ public class OplogJUnitTest extends DiskRegionTestingBase
});
assertNull(conflated);
th.start();
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
LocalRegion.ISSUE_CALLBACKS_TO_CACHE_OBSERVER = false;
}
@@ -2094,7 +2094,7 @@ public class OplogJUnitTest extends DiskRegionTestingBase
finally {
((LocalRegion)region).getDiskRegion().releaseWriteLock();
}
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
region.close();
region = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache,
diskProps, Scope.LOCAL);
@@ -3170,7 +3170,7 @@ public class OplogJUnitTest extends DiskRegionTestingBase
}
});
try {
- Threads.join(clearOp, 30 * 1000, null);
+ ThreadUtils.join(clearOp, 30 * 1000);
}
catch (Exception e) {
testFailed = true;
@@ -3237,7 +3237,7 @@ public class OplogJUnitTest extends DiskRegionTestingBase
});
clearTh.start();
try {
- Threads.join(clearTh, 120 * 1000, null);
+ ThreadUtils.join(clearTh, 120 * 1000);
failure = clearTh.isAlive();
failureCause = "Clear Thread still running !";
} catch(Exception e) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionAPIDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionAPIDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionAPIDUnitTest.java
index 96994e8..466dfa4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionAPIDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionAPIDUnitTest.java
@@ -47,7 +47,7 @@ import com.gemstone.gemfire.distributed.internal.ReplyException;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -172,7 +172,7 @@ public class PartitionedRegionAPIDUnitTest extends
RegionAttributes regionAttribs = attr.create();
cache.createRegion("PR1",
regionAttribs);
- LogWriterSupport.getLogWriter().info("Region created in VM1.");
+ LogWriterUtils.getLogWriter().info("Region created in VM1.");
}
};
@@ -192,12 +192,12 @@ public class PartitionedRegionAPIDUnitTest extends
for (int b = 0; b < numBucks; b++) {
if (par.getBucketKeys(b).contains(key)) {
foundIt = true;
- LogWriterSupport.getLogWriter().severe("Key " + key + " found in bucket " + b);
+ LogWriterUtils.getLogWriter().severe("Key " + key + " found in bucket " + b);
break;
}
}
if (!foundIt) {
- LogWriterSupport.getLogWriter().severe("Key " + key + " not found in any bucket");
+ LogWriterUtils.getLogWriter().severe("Key " + key + " not found in any bucket");
}
return foundIt;
}
@@ -348,8 +348,8 @@ public class PartitionedRegionAPIDUnitTest extends
"<ExpectedException action=remove>"
+ entryNotFoundException
+ "</ExpectedException>");
- LogWriterSupport.getLogWriter().fine("Out of doPutOperations1");
- LogWriterSupport.getLogWriter().fine("All the puts done successfully for vm0.");
+ LogWriterUtils.getLogWriter().fine("Out of doPutOperations1");
+ LogWriterUtils.getLogWriter().fine("All the puts done successfully for vm0.");
}
});
@@ -507,8 +507,8 @@ public class PartitionedRegionAPIDUnitTest extends
+ entryNotFoundException
+ "</ExpectedException>");
- LogWriterSupport.getLogWriter().fine("Out of doPutOperations2");
- LogWriterSupport.getLogWriter().fine("All the puts done successfully for vm1.");
+ LogWriterUtils.getLogWriter().fine("Out of doPutOperations2");
+ LogWriterUtils.getLogWriter().fine("All the puts done successfully for vm1.");
}
});
}
@@ -655,7 +655,7 @@ public class PartitionedRegionAPIDUnitTest extends
size = pr.size();
assertEquals("Size doesnt return expected value", size, 10);
// }
- LogWriterSupport.getLogWriter().fine(
+ LogWriterUtils.getLogWriter().fine(
"All the puts done successfully for vm0.");
@@ -796,7 +796,7 @@ public class PartitionedRegionAPIDUnitTest extends
"<ExpectedException action=remove>"
+ entryNotFoundException
+ "</ExpectedException>");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.fine("All the remove done successfully for vm0.");
}
});
@@ -1019,7 +1019,7 @@ public class PartitionedRegionAPIDUnitTest extends
assertTrue("containsKey() Validation failed for key = "
+ i, conKey);
}
- LogWriterSupport.getLogWriter().fine(
+ LogWriterUtils.getLogWriter().fine(
"containsKey() Validated entry for key = " + i);
}
@@ -1037,7 +1037,7 @@ public class PartitionedRegionAPIDUnitTest extends
"containsValueForKey() Validation failed for key = "
+ i, conKey);
}
- LogWriterSupport.getLogWriter().fine(
+ LogWriterUtils.getLogWriter().fine(
"containsValueForKey() Validated entry for key = "
+ i);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionBucketCreationDistributionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionBucketCreationDistributionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionBucketCreationDistributionDUnitTest.java
index 32da685..79669b2 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionBucketCreationDistributionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionBucketCreationDistributionDUnitTest.java
@@ -39,9 +39,9 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -118,22 +118,22 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
// put().
validateBucket2NodeBeforePutInMultiplePartitionedRegion(
startIndexForRegion, endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Bucket2Node region of partition regions before any put() successfully validated ");
// doing put() operation on multiple partition region
putInMultiplePartitionedRegion(startIndexForRegion, endIndexForRegion,
startIndexForKey, endIndexForKey);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Put() operation successfully in partition regions");
// validating bucket regions of multiple partition regions.
validateBucketsAfterPutInMultiplePartitionRegion(startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Bucket regions of partition regions successfully validated");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testBucketCerationInMultiPlePartitionRegion() Successfully completed");
}
@@ -171,23 +171,23 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Partition Regions successfully created ");
// doing put() operation from vm0 only
putInMultiplePartitionRegionFromOneVm(vm[0], startIndexForRegion,
endIndexForRegion, startIndexForKey, endIndexForKey);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Put() Opereration done only from one VM ");
// validating bucket distribution ovar all the nodes
int noBucketsExpectedOnEachNode = getNoBucketsExpectedOnEachNode();
validateBucketsDistributionInMultiplePartitionRegion(startIndexForRegion,
endIndexForRegion, noBucketsExpectedOnEachNode);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Bucket regions are equally distributed");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testBucketCerationInMultiPlePartitionRegion() successfully completed");
}
@@ -229,23 +229,23 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
// creating multiple partition regions on 3 nodes with localMaxMemory=200 redundancy = 0
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Partition Regions successfully created ");
// doing put() operation from all vms
putInMultiplePartitionedRegionFromAllVms(startIndexForRegion,
endIndexForRegion, startIndexForKey, endIndexForKey);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Put() Opereration done only from one VM ");
// validating bucket distribution ovar all the nodes
int noBucketsExpectedOnEachNode = getNoBucketsExpectedOnEachNode() - 4;
validateBucketsDistributionInMultiplePartitionRegion(startIndexForRegion,
endIndexForRegion, noBucketsExpectedOnEachNode);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Bucket regions are equally distributed");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testBucketCerationInMultiPlePartitionRegion() successfully created");
}
@@ -298,7 +298,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
// doing put() in multiple partition regions from 3 nodes.
putInMultiplePartitionedRegionFrom3Nodes(startIndexForRegion,
endIndexForRegion, startIndexForKey, endIndexForKey);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketDistributionAfterNodeAdditionInPR() - Put() operation successfully in partition regions on 3 Nodes");
@@ -316,15 +316,15 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
// doing put() in multiple partition regions from 3 nodes.
putInMultiplePartitionedRegionFrom3Nodes(startIndexForRegion,
endIndexForRegion, startIndexForKey, endIndexForKey);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketDistributionAfterNodeAdditionInPR() - Put() operation successfully in partition regions on 4th node");
// validating bucket creation in the 4th node
validateBucketsOnAllNodes(startIndexForRegion, endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketDistributionAfterNodeAdditionInPR() - buckets on all the nodes are validated");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testBucketDistributionAfterNodeAdditionInPR() successfully created");
}
@@ -370,7 +370,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
endIndexForRegion, startIndexForKey, endIndexForKey);
validateTotalNumBuckets(prPrefix, vmList, startIndexForRegion,
endIndexForRegion, expectedNumBuckets);
- LogWriterSupport.getLogWriter().info("testTotalNumBucketProperty() completed successfully");
+ LogWriterUtils.getLogWriter().info("testTotalNumBucketProperty() completed successfully");
}
@@ -515,7 +515,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -557,7 +557,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
if (async[count].exceptionOccurred()) {
@@ -590,7 +590,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -628,7 +628,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -700,7 +700,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -745,7 +745,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -774,12 +774,12 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
startIndexForRegion, endIndexForRegion));
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
if (async[count].exceptionOccurred()) {
- LogWriterSupport.getLogWriter().warning("Failure in async invocation on vm "
+ LogWriterUtils.getLogWriter().warning("Failure in async invocation on vm "
+ vm[count]
+ " with exception " + async[count].getException());
throw async[count].getException();
@@ -810,7 +810,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < 4; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < 4; count++) {
@@ -831,12 +831,12 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 4; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 4; count < AsyncInvocationArrSize; count++) {
if (async[count].exceptionOccurred()) {
- LogWriterSupport.getLogWriter().warning("Failure of async invocation on VM " +
+ LogWriterUtils.getLogWriter().warning("Failure of async invocation on VM " +
this.vm[count] + " exception thrown " + async[count].getException());
throw async[count].getException();
}
@@ -865,7 +865,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -951,12 +951,12 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
}
if (redundancyManageFlag == 0) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"validateRedundancy() - Redundancy not satisfied for the partition region : "
+ pr.getName());
}
else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"validateRedundancy() - Redundancy satisfied for the partition region : "
+ pr.getName());
}
@@ -1142,7 +1142,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
assertTrue(pr.getRegionAdvisor().getNumProfiles() > 0);
assertTrue(pr.getRegionAdvisor().getNumDataStores() > 0);
final int bucketSetSize = pr.getRegionAdvisor().getCreatedBucketsCount();
- LogWriterSupport.getLogWriter().info("BucketSet size " + bucketSetSize);
+ LogWriterUtils.getLogWriter().info("BucketSet size " + bucketSetSize);
if (bucketSetSize != 0) {
Set buckets = pr.getRegionAdvisor().getBucketSet();
Iterator it = buckets.iterator();
@@ -1156,7 +1156,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
numBucketsWithStorage++;
}
} catch (NoSuchElementException end) {
- LogWriterSupport.getLogWriter().info("BucketSet iterations " + numBucketsWithStorage);
+ LogWriterUtils.getLogWriter().info("BucketSet iterations " + numBucketsWithStorage);
}
fail("There should be no buckets assigned");
}
@@ -1188,7 +1188,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
assertNotNull(pr.getDataStore());
final int localBSize = pr.getDataStore().getBucketsManaged();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"validateBucketsDistribution() - Number of bukctes for "
+ pr.getName() + " : " + localBSize);
@@ -1263,7 +1263,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
cache.createRegion(prPrefix + i,
createRegionAttrs(redundancy, localMaxMem, numBuckets));
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"createMultiplePartitionRegion() - Partition Regions Successfully Completed ");
}
@@ -1309,7 +1309,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
Set bucketsWithStorage = pr.getRegionAdvisor().getBucketSet();
assertEquals(expectedNumBuckets, bucketsWithStorage.size());
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Total Number of buckets validated in partition region");
}
};
@@ -1382,7 +1382,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
createPartitionRegion(vmList, midIndexForRegion, endIndexForNode,
localMaxMemory, redundancyTwo);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testBucketCerationInMultiPlePartitionRegion() - Partition Regions successfully created ");
}
@@ -1394,7 +1394,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
{
for (int i = 0; i < 4; i++) {
if (vm[i] == null)
- LogWriterSupport.getLogWriter().fine("VM is null" + vm[i]);
+ LogWriterUtils.getLogWriter().fine("VM is null" + vm[i]);
vm[i].invoke(calculateMemoryOfPartitionRegion(i, i + 1));
}
}
@@ -1430,7 +1430,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
while (sizeItr.hasNext()) {
assertEquals(sizeItr.next(), objSize);
}
- LogWriterSupport.getLogWriter().info("Size of partition region on each node is equal");
+ LogWriterUtils.getLogWriter().info("Size of partition region on each node is equal");
}
};
vm[0].invoke(testTotalMemory);
@@ -1508,7 +1508,7 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
/** testing whether exception occurred */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -1533,9 +1533,9 @@ public class PartitionedRegionBucketCreationDistributionDUnitTest extends
.getRegion(Region.SEPARATOR + regionName);
for (int i = 0; i < MAX_SIZE * 2; i++) {
pr.put(key + i, Obj);
- LogWriterSupport.getLogWriter().info("MAXSIZE : " + i);
+ LogWriterUtils.getLogWriter().info("MAXSIZE : " + i);
}
- LogWriterSupport.getLogWriter().info("Put successfully done for vm" + key);
+ LogWriterUtils.getLogWriter().info("Put successfully done for vm" + key);
}
};
return putForLocalMaxMemory;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCacheCloseDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCacheCloseDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCacheCloseDUnitTest.java
index e6fd02b..8fd6f4e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCacheCloseDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCacheCloseDUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -102,7 +102,7 @@ public class PartitionedRegionCacheCloseDUnitTest extends
key = new Integer(k);
pr.put(key, rName + k);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info("VM0 Done put successfully for PR = " + rName + j);
}
}
@@ -123,13 +123,13 @@ public class PartitionedRegionCacheCloseDUnitTest extends
key = new Integer(k);
pr.put(key, rName + k);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info("VM1 Done put successfully for PR = " + rName + j);
}
}
});
- Threads.join(async0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
- Threads.join(async1, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 30 * 1000);
+ ThreadUtils.join(async1, 30 * 1000);
if(async0.exceptionOccurred()) {
Assert.fail("Exception during async0", async0.getException());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCreationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCreationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCreationDUnitTest.java
index 7a82c4a..78c70dc 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCreationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionCreationDUnitTest.java
@@ -38,9 +38,9 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
@SuppressWarnings("serial")
@@ -68,7 +68,7 @@ public class PartitionedRegionCreationDUnitTest extends
*/
public void testSequentialCreation() throws Exception
{
- LogWriterSupport.getLogWriter().info("*****CREATION TEST ACK STARTED*****");
+ LogWriterUtils.getLogWriter().info("*****CREATION TEST ACK STARTED*****");
final String name = getUniqueName();
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
@@ -91,7 +91,7 @@ public class PartitionedRegionCreationDUnitTest extends
vm1.invoke(getCacheSerializableRunnableForPRValidate(name));
vm2.invoke(getCacheSerializableRunnableForPRValidate(name));
vm3.invoke(getCacheSerializableRunnableForPRValidate(name));
- LogWriterSupport.getLogWriter().info("*****CREATION TEST ACK ENDED*****");
+ LogWriterUtils.getLogWriter().info("*****CREATION TEST ACK ENDED*****");
}
/**
@@ -104,7 +104,7 @@ public class PartitionedRegionCreationDUnitTest extends
// 2/8/06
public void testConcurrentCreation() throws Throwable
{
- LogWriterSupport.getLogWriter().info("*****CREATION TEST NO_ACK STARTED*****");
+ LogWriterUtils.getLogWriter().info("*****CREATION TEST NO_ACK STARTED*****");
final String name = getUniqueName();
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
@@ -124,7 +124,7 @@ public class PartitionedRegionCreationDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -138,7 +138,7 @@ public class PartitionedRegionCreationDUnitTest extends
vm1.invoke(getCacheSerializableRunnableForPRValidate(name));
vm2.invoke(getCacheSerializableRunnableForPRValidate(name));
vm3.invoke(getCacheSerializableRunnableForPRValidate(name));
- LogWriterSupport.getLogWriter().info("*****CREATION TEST NO_ACK ENDED*****");
+ LogWriterUtils.getLogWriter().info("*****CREATION TEST NO_ACK ENDED*****");
}
/**
@@ -254,7 +254,7 @@ public class PartitionedRegionCreationDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
th.join(30 * 1000);
@@ -427,7 +427,7 @@ public class PartitionedRegionCreationDUnitTest extends
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info("*****INITIALIZATION TEST STARTED*****");
+ LogWriterUtils.getLogWriter().info("*****INITIALIZATION TEST STARTED*****");
int AsyncInvocationArrSize = 8;
AsyncInvocation[] async = new AsyncInvocation[AsyncInvocationArrSize];
async[0] = vm0.invokeAsync(getCacheSerializableRunnableForPRCreate(name,
@@ -441,7 +441,7 @@ public class PartitionedRegionCreationDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < 4; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < 4; count++) {
@@ -457,7 +457,7 @@ public class PartitionedRegionCreationDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 4; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 4; count < AsyncInvocationArrSize; count++) {
@@ -465,7 +465,7 @@ public class PartitionedRegionCreationDUnitTest extends
Assert.fail("exception during " + count, async[count].getException());
}
}
- LogWriterSupport.getLogWriter().info("*****INITIALIZATION TEST ENDED*****");
+ LogWriterUtils.getLogWriter().info("*****INITIALIZATION TEST ENDED*****");
}
/**
@@ -483,7 +483,7 @@ public class PartitionedRegionCreationDUnitTest extends
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info("*****REGISTRATION TEST STARTED*****");
+ LogWriterUtils.getLogWriter().info("*****REGISTRATION TEST STARTED*****");
int AsyncInvocationArrSize = 8;
AsyncInvocation[] async = new AsyncInvocation[AsyncInvocationArrSize];
async[0] = vm0.invokeAsync(getCacheSerializableRunnableForPRCreate(name,
@@ -497,7 +497,7 @@ public class PartitionedRegionCreationDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < 4; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < 4; count++) {
@@ -517,7 +517,7 @@ public class PartitionedRegionCreationDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 4; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 4; count < AsyncInvocationArrSize; count++) {
@@ -525,7 +525,7 @@ public class PartitionedRegionCreationDUnitTest extends
Assert.fail("exception during " + count, async[count].getException());
}
}
- LogWriterSupport.getLogWriter().info("*****REGISTRATION TEST ENDED*****");
+ LogWriterUtils.getLogWriter().info("*****REGISTRATION TEST ENDED*****");
}
/**
@@ -543,13 +543,13 @@ public class PartitionedRegionCreationDUnitTest extends
VM dataStore1 = host.getVM(1);
VM accessor0 = host.getVM(2);
VM accessor1 = host.getVM(3);
- LogWriterSupport.getLogWriter().info("*****PERSISTENCE CONFLICTS TEST STARTED*****");
+ LogWriterUtils.getLogWriter().info("*****PERSISTENCE CONFLICTS TEST STARTED*****");
accessor0.invoke(getCacheSerializableRunnableForPRPersistence(name, 0, false, false));
accessor1.invoke(getCacheSerializableRunnableForPRPersistence(name, 0, true, true));
dataStore0.invoke(getCacheSerializableRunnableForPRPersistence(name, 100, true, false));
dataStore1.invoke(getCacheSerializableRunnableForPRPersistence(name, 100, false, true));
- LogWriterSupport.getLogWriter().info("*****PERSISTENCE CONFLICTS TEST ENDED*****");
+ LogWriterUtils.getLogWriter().info("*****PERSISTENCE CONFLICTS TEST ENDED*****");
}
/**
@@ -629,7 +629,7 @@ public class PartitionedRegionCreationDUnitTest extends
+ name + " configs do not exists in region - "
+ root.getName());
}
- LogWriterSupport.getLogWriter().info(" PartitionedRegionCreationTest PartionedRegionRegistrationTest() Successfully Complete .. ");
+ LogWriterUtils.getLogWriter().info(" PartitionedRegionCreationTest PartionedRegionRegistrationTest() Successfully Complete .. ");
}
};
return (CacheSerializableRunnable)registerPrRegion;
@@ -695,11 +695,11 @@ public class PartitionedRegionCreationDUnitTest extends
getCache().getLogger().warning(
"Creation caught IllegalStateException", ex);
if (exceptionType.equals("GLOBAL"))
- LogWriterSupport.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for scope = GLOBAL");
+ LogWriterUtils.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for scope = GLOBAL");
if (exceptionType.equals("REDUNDANCY"))
- LogWriterSupport.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for 0 > redundancy > 3 ");
+ LogWriterUtils.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for 0 > redundancy > 3 ");
if (exceptionType.equals("DIFFREG"))
- LogWriterSupport.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for regions with diff scope ");
+ LogWriterUtils.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for regions with diff scope ");
}
assertNotNull("Partitioned Region " + regionName + " not in cache",
cache.getRegion(regionName));
@@ -733,11 +733,11 @@ public class PartitionedRegionCreationDUnitTest extends
getCache().getLogger().warning(
"Creation caught IllegalStateException", ex);
if (exceptionType.equals("GLOBAL"))
- LogWriterSupport.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for scope = GLOBAL");
+ LogWriterUtils.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for scope = GLOBAL");
if (exceptionType.equals("REDUNDANCY"))
- LogWriterSupport.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for 0 > redundancy > 3 ");
+ LogWriterUtils.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for 0 > redundancy > 3 ");
if (exceptionType.equals("DIFFREG"))
- LogWriterSupport.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for regions with diff scope ");
+ LogWriterUtils.getLogWriter().info("PartitionedRegionCreationDUnitTest:testPartitionedRegionCreationExceptions() Got a Correct exception for regions with diff scope ");
}
assertNotNull("Partitioned Region " + rName + " not in cache",
cache.getRegion(rName));
@@ -870,7 +870,7 @@ public class PartitionedRegionCreationDUnitTest extends
RegionAttributes regionAttribs = attr.create();
PartitionedRegion accessor = (PartitionedRegion)cache.createRegion(
"PR1", regionAttribs);
- LogWriterSupport.getLogWriter().info("Region created in VM1.");
+ LogWriterUtils.getLogWriter().info("Region created in VM1.");
assertEquals(accessor.getTotalNumberOfBuckets(),
PartitionAttributesFactory.GLOBAL_MAX_BUCKETS_DEFAULT);
try {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDUnitTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDUnitTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDUnitTestCase.java
index 1411e6c..cd6e980 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDUnitTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDUnitTestCase.java
@@ -199,10 +199,10 @@ public class PartitionedRegionDUnitTestCase extends CacheTestCase
prPrefix + i,
PartitionedRegionTestHelper.createRegionAttrsForPR(redundancy,
localmaxMemory, recoveryDelay));
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Created Region new --- " + prPrefix + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Created Region new --- " + prPrefix + i);
} catch (RegionExistsException ignore) {}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("getCreateMultiplePRregion() - Partition Regions Successfully Completed ");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("getCreateMultiplePRregion() - Partition Regions Successfully Completed ");
}
};
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDestroyDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDestroyDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDestroyDUnitTest.java
index 7688678..e5a95cd 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDestroyDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionDestroyDUnitTest.java
@@ -31,9 +31,9 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -80,7 +80,7 @@ public class PartitionedRegionDestroyDUnitTest extends
cache.createRegion(PR_PREFIX + i,
createRegionAttrsForPR(0, 200));
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Successfully created " + MAX_REGIONS + " PartitionedRegions.");
}
};
@@ -159,7 +159,7 @@ public class PartitionedRegionDestroyDUnitTest extends
}
}
catch (RegionDestroyedException e) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"RegionDestroyedException occured for Region = " + PR_PREFIX + j);
}
getCache().getLogger().info("<ExpectedException action=remove>" +
@@ -167,7 +167,7 @@ public class PartitionedRegionDestroyDUnitTest extends
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
if(async1.exceptionOccurred()) {
Assert.fail("async1 failed", async1.getException());
}
@@ -228,18 +228,18 @@ public class PartitionedRegionDestroyDUnitTest extends
// Assert that all PartitionedRegions are gone
assertEquals(0, rootRegion.size());
- LogWriterSupport.getLogWriter().info("allPartitionedRegions size() =" + rootRegion.size());
+ LogWriterUtils.getLogWriter().info("allPartitionedRegions size() =" + rootRegion.size());
assertEquals("ThePrIdToPR Map size is:"+PartitionedRegion.prIdToPR.size()+" instead of 0", MAX_REGIONS, PartitionedRegion.prIdToPR.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PartitionedRegion.prIdToPR.size() ="
+ PartitionedRegion.prIdToPR.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"# of Subregions of root Region after destroy call = "
+ rootRegion.subregions(false).size());
Iterator itr = (rootRegion.subregions(false)).iterator();
while (itr.hasNext()) {
Region rg = (Region)itr.next();
- LogWriterSupport.getLogWriter().info("Root Region SubRegionName = " + rg.getName());
+ LogWriterUtils.getLogWriter().info("Root Region SubRegionName = " + rg.getName());
// assertEquals("REGION NAME FOUND:"+rg.getName(),-1, rg.getName().indexOf(
// PartitionedRegionHelper.BUCKET_2_NODE_TABLE_PREFIX));
assertEquals("regionFound that should be gone!:"+rg.getName(),-1, rg.getName().indexOf(
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHADUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHADUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHADUnitTest.java
index acc4f5b..3f4edf1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHADUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHADUnitTest.java
@@ -39,10 +39,10 @@ import com.gemstone.gemfire.internal.cache.control.InternalResourceManager.Resou
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -248,14 +248,14 @@ public class PartitionedRegionHADUnitTest extends PartitionedRegionDUnitTestCase
public void run2() throws CacheException {
getCache().getLogger().info("<ExpectedException action=add>" +
expectedExceptions + "</ExpectedException>");
- LogWriterSupport.getLogWriter().info("<ExpectedException action=add>" +
+ LogWriterUtils.getLogWriter().info("<ExpectedException action=add>" +
expectedExceptions + "</ExpectedException>");
}
};
SerializableRunnable removeExpectedExceptions =
new CacheSerializableRunnable("removeExpectedExceptions") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("<ExpectedException action=remove>" +
+ LogWriterUtils.getLogWriter().info("<ExpectedException action=remove>" +
expectedExceptions + "</ExpectedException>");
getCache().getLogger().info("<ExpectedException action=remove>" +
expectedExceptions + "</ExpectedException>");
@@ -273,7 +273,7 @@ public class PartitionedRegionHADUnitTest extends PartitionedRegionDUnitTestCase
for (int k = 0; k < 10; k++) {
pr.put(j + PR_PREFIX + k, PR_PREFIX + k);
}
- LogWriterSupport.getLogWriter().info("VM0 Done put successfully for PR = " + PR_PREFIX
+ LogWriterUtils.getLogWriter().info("VM0 Done put successfully for PR = " + PR_PREFIX
+ j);
}
}
@@ -290,7 +290,7 @@ public class PartitionedRegionHADUnitTest extends PartitionedRegionDUnitTestCase
for (int k = 10; k < 20; k++) {
pr.put(j + PR_PREFIX + k, PR_PREFIX + k);
}
- LogWriterSupport.getLogWriter().info("VM1 Done put successfully for PR = " + PR_PREFIX
+ LogWriterUtils.getLogWriter().info("VM1 Done put successfully for PR = " + PR_PREFIX
+ j);
}
}
@@ -299,7 +299,7 @@ public class PartitionedRegionHADUnitTest extends PartitionedRegionDUnitTestCase
// dataStore1.invoke(addExpectedExceptions);
AsyncInvocation async0 = dataStore0.invokeAsync(dataStore0Puts);
// AsyncInvocation async1 = dataStore1.invokeAsync(dataStore1Puts);
- Threads.join(async0, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 30 * 1000);
// async1.join();
dataStore0.invoke(removeExpectedExceptions);
// dataStore1.invoke(removeExpectedExceptions);
@@ -319,7 +319,7 @@ public class PartitionedRegionHADUnitTest extends PartitionedRegionDUnitTestCase
async0 = dataStore0.invokeAsync(dataStore0Puts);
// async1 = dataStore1.invokeAsync(dataStore1Puts);
- Threads.join(async0, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 30 * 1000);
// async1.join();
if (async0.exceptionOccurred()) {
@@ -380,7 +380,7 @@ public class PartitionedRegionHADUnitTest extends PartitionedRegionDUnitTestCase
// This accessor should NOT have picked up any buckets.
assertFalse(vm3LBRsize != 0);
int vm2B2Nsize = ((Integer)dataStore2.invoke(validateBucketsOnNode)).intValue();
- LogWriterSupport.getLogWriter().info("vm2B2Nsize = " + vm2B2Nsize);
+ LogWriterUtils.getLogWriter().info("vm2B2Nsize = " + vm2B2Nsize);
assertEquals(vm2B2Nsize, vm2LBRsize);
}
}
[30/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithoutWLDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithoutWLDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithoutWLDUnitTest.java
index 0cbede3..d5badab 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithoutWLDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithoutWLDUnitTest.java
@@ -47,10 +47,10 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.SerializableRunnableIF;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
/**
*
@@ -125,7 +125,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
asyncInvs[1] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 30*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 30*000);
}
for (AsyncInvocation inv : asyncInvs) {
@@ -166,7 +166,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
asyncInvs[1] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 30*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 30*000);
}
for (AsyncInvocation inv : asyncInvs) {
@@ -210,7 +210,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
asyncInvs[1] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, totalDataSize));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 30*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 30*000);
}
for (AsyncInvocation inv : asyncInvs) {
if (inv.exceptionOccurred()) {
@@ -268,7 +268,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize ));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 60*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 60*000);
}
for (AsyncInvocation inv : asyncInvs) {
@@ -330,7 +330,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize ));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 60*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 60*000);
}
for (AsyncInvocation inv : asyncInvs) {
if (inv.exceptionOccurred()) {
@@ -402,7 +402,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize ));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 60*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 60*000);
}
for (AsyncInvocation inv : asyncInvs) {
@@ -481,7 +481,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
if (index instanceof CompactRangeIndex) {
// Ignore invalid values.
if (value != Token.INVALID && value != Token.TOMBSTONE) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
Integer ID = ((Portfolio) value).getID();
assertTrue("Did not find index key for REgionEntry [key: "
@@ -517,7 +517,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
expectedNullEntries++;
}
} else {
- LogWriterSupport.getLogWriter().info(internalEntry.getKey()+"");
+ LogWriterUtils.getLogWriter().info(internalEntry.getKey()+"");
expectedUndefinedEntries++;
}
}
@@ -529,7 +529,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
Collection<Position> positions = ((Portfolio)value).positions.values();
for (Position pos : positions) {
if (pos != null) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
String secId = pos.secId;
assertTrue("Did not find index key for REgionEntry [key: "
+ internalEntry.getKey() + " , value: " + value
@@ -603,7 +603,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
IndexStatistics stats = index.getStatistics();
if (index instanceof CompactRangeIndex) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" Actual Size of Index is: " + actualSize);
/* getLogWriter().info(
" Actual Size of Index is: " + actualSize + " Undefined size is: "
@@ -615,21 +615,21 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
getLogWriter().info(((RegionEntry) obj).getKey() + "");
}
*/
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" Expected Size of Index is: " + expectedIndexSize
+ " Undefined size is: " + expectedUndefinedEntries
+ " And NULL size is: " + expectedNullEntries);
assertEquals("No of index keys NOT equals the no shown in statistics for index:" + index.getName(), ((CompactRangeIndex) index).getIndexStorage().size(), stats.getNumberOfKeys());
} else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" Actual Size of Index is: " + actualSize + " Undefined size is: "
+ ((RangeIndex) index).undefinedMappedEntries.getNumEntries()
+ " And NULL size is: "
+ ((RangeIndex) index).nullMappedEntries.getNumEntries());
for (Object obj : ((RangeIndex) index).undefinedMappedEntries.map.keySet()) {
- LogWriterSupport.getLogWriter().info(((RegionEntry) obj).getKey() + "");
+ LogWriterUtils.getLogWriter().info(((RegionEntry) obj).getKey() + "");
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" Expected Size of Index is: " + expectedIndexSize
+ " Undefined size is: " + expectedUndefinedEntries
+ " And NULL size is: " + expectedNullEntries);
@@ -674,7 +674,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
if (index instanceof CompactRangeIndex) {
// Ignore invalid values.
if (value != Token.INVALID && value != Token.TOMBSTONE) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
Integer ID = ((Portfolio) value).getID();
assertTrue("Did not find index key for REgionEntry [key: "
@@ -722,7 +722,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
Collection<Position> positions = ((Portfolio)value).positions.values();
for (Position pos : positions) {
if (pos != null) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
String secId = pos.secId;
assertTrue("Did not find index key for REgionEntry [key: "
+ internalEntry.getKey() + " , value: " + value
@@ -769,7 +769,7 @@ public class ConcurrentIndexUpdateWithoutWLDUnitTest extends
try {
iter = ((CompactRangeIndex) index).getIndexStorage().iterator(null);
while (iter.hasNext()) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Index Values : " + iter.next().getDeserializedValue());
actualValueSize++;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/CopyOnReadIndexDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/CopyOnReadIndexDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/CopyOnReadIndexDUnitTest.java
index ab10a18..d44c7f6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/CopyOnReadIndexDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/CopyOnReadIndexDUnitTest.java
@@ -43,10 +43,10 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -613,7 +613,7 @@ public class CopyOnReadIndexDUnitTest extends CacheTestCase {
getSystem(props);
final ClientCacheFactory ccf = new ClientCacheFactory(props);
- ccf.addPoolServer(NetworkSupport.getServerHostName(server.getHost()), port);
+ ccf.addPoolServer(NetworkUtils.getServerHostName(server.getHost()), port);
ccf.setPoolSubscriptionEnabled(true);
ClientCache cache = (ClientCache)getClientCache(ccf);
@@ -630,7 +630,7 @@ public class CopyOnReadIndexDUnitTest extends CacheTestCase {
protected Properties getServerProperties() {
Properties p = new Properties();
- p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
return p;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexCreationInternalsJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexCreationInternalsJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexCreationInternalsJUnitTest.java
index 4f2724c..7df9a5b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexCreationInternalsJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexCreationInternalsJUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.cache.query.internal.CompiledRegion;
import com.gemstone.gemfire.cache.query.internal.QCompiler;
import com.gemstone.gemfire.cache.query.internal.types.TypeUtils;
import com.gemstone.gemfire.internal.cache.LocalRegion;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -199,8 +199,8 @@ public class IndexCreationInternalsJUnitTest {
th1.start();
th2.start();
name = imgr.putCanonicalizedIteratorNameIfAbsent("index_iter1.coll1");
- Threads.join(th1, 30 * 1000, null);
- Threads.join(th2, 30 * 1000, null);
+ ThreadUtils.join(th1, 30 * 1000);
+ ThreadUtils.join(th2, 30 * 1000);
if( !(name.equals(this.childThreadName1) && name.equals(this.childThreadName2)) ) {
fail("Canonicalization name generation test failed in concurrent scenario as first name is "+this.childThreadName1 + "and second is "+name + " and third is "+this.childThreadName2);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexMaintainceJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexMaintainceJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexMaintainceJUnitTest.java
index bea41e9..c1d39b9 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexMaintainceJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexMaintainceJUnitTest.java
@@ -51,7 +51,7 @@ import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.query.functional.StructSetOrResultsSet;
import com.gemstone.gemfire.cache.query.internal.QueryObserverAdapter;
import com.gemstone.gemfire.cache.query.internal.QueryObserverHolder;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -322,7 +322,7 @@ public class IndexMaintainceJUnitTest {
}
});
th.start();
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
assertTrue(IndexMaintainceJUnitTest.region.size() == 1);
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexTrackingQueryObserverDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexTrackingQueryObserverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexTrackingQueryObserverDUnitTest.java
index 5dc75ef..d9b0d5e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexTrackingQueryObserverDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/IndexTrackingQueryObserverDUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionQueryEvaluator.TestH
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -275,7 +275,7 @@ public class IndexTrackingQueryObserverDUnitTest extends CacheTestCase {
totalResults += i.intValue();
}
- LogWriterSupport.getLogWriter().fine("Index Info result size is " + totalResults);
+ LogWriterUtils.getLogWriter().fine("Index Info result size is " + totalResults);
assertEquals(results, totalResults);
}
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/InitializeIndexEntryDestroyQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/InitializeIndexEntryDestroyQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/InitializeIndexEntryDestroyQueryDUnitTest.java
index b4ae88c..bfc431e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/InitializeIndexEntryDestroyQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/InitializeIndexEntryDestroyQueryDUnitTest.java
@@ -37,8 +37,8 @@ import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -90,7 +90,7 @@ public class InitializeIndexEntryDestroyQueryDUnitTest extends CacheTestCase {
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
localRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", localRegion);
@@ -197,12 +197,12 @@ public class InitializeIndexEntryDestroyQueryDUnitTest extends CacheTestCase {
}
});
- Threads.join(asyInvk0, 1000 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 1000 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
- Threads.join(asyInvk1, 1000 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk1, 1000 * 1000);
if (asyInvk1.exceptionOccurred()) {
Assert.fail("asyInvk1 failed", asyInvk1.getException());
}
@@ -227,7 +227,7 @@ public class InitializeIndexEntryDestroyQueryDUnitTest extends CacheTestCase {
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -333,12 +333,12 @@ public class InitializeIndexEntryDestroyQueryDUnitTest extends CacheTestCase {
}
});
- Threads.join(asyInvk0, 1000 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 1000 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
- Threads.join(asyInvk1, 1000 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk1, 1000 * 1000);
if (asyInvk1.exceptionOccurred()) {
Assert.fail("asyInvk1 failed", asyInvk1.getException());
}
@@ -363,7 +363,7 @@ public class InitializeIndexEntryDestroyQueryDUnitTest extends CacheTestCase {
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/MultiIndexCreationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/MultiIndexCreationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/MultiIndexCreationDUnitTest.java
index 5cad45b..cea00c1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/MultiIndexCreationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/MultiIndexCreationDUnitTest.java
@@ -33,9 +33,9 @@ import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -90,7 +90,7 @@ public class MultiIndexCreationDUnitTest extends CacheTestCase {
public Object call() throws Exception {
long giveupTime = System.currentTimeMillis() + 60000;
while (!hooked && System.currentTimeMillis() < giveupTime) {
- LogWriterSupport.getLogWriter().info("Query Waiting for index hook.");
+ LogWriterUtils.getLogWriter().info("Query Waiting for index hook.");
Wait.pause(100);
}
assertTrue(hooked);
@@ -127,12 +127,12 @@ public class MultiIndexCreationDUnitTest extends CacheTestCase {
}
});
- Threads.join(a1, 6000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(a1, 6000);
if(a1.exceptionOccurred()) {
fail(a1.getException().getMessage());
}
- Threads.join(a2, 6000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(a2, 6000);
if(a2.exceptionOccurred()) {
fail(a2.getException().getMessage());
}
@@ -189,9 +189,9 @@ public class MultiIndexCreationDUnitTest extends CacheTestCase {
long giveupTime = System.currentTimeMillis() + 60000;
if (spot == 13) {
hooked = true;
- LogWriterSupport.getLogWriter().info("MultiIndexCreationTestHook is hooked in create defined indexes.");
+ LogWriterUtils.getLogWriter().info("MultiIndexCreationTestHook is hooked in create defined indexes.");
while (hooked && System.currentTimeMillis() < giveupTime) {
- LogWriterSupport.getLogWriter().info("MultiIndexCreationTestHook waiting.");
+ LogWriterUtils.getLogWriter().info("MultiIndexCreationTestHook waiting.");
Wait.pause(100);
}
assertEquals(hooked, false);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/PutAllWithIndexPerfDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/PutAllWithIndexPerfDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/PutAllWithIndexPerfDUnitTest.java
index 098c814..fd50e63 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/PutAllWithIndexPerfDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/PutAllWithIndexPerfDUnitTest.java
@@ -40,9 +40,9 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.junit.categories.DistributedTest;
@@ -84,7 +84,7 @@ public class PutAllWithIndexPerfDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.put("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.put("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
Cache cache = new CacheFactory(config).create();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -105,7 +105,7 @@ public class PutAllWithIndexPerfDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(PutAllWithIndexPerfDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDUnitTest.java
index 7df9922..30a5d6f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDUnitTest.java
@@ -28,9 +28,9 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -78,7 +78,7 @@ public class PRBasicIndexCreationDUnitTest extends
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreationDUnitTest.testPRBasicIndexCreate started ....");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
@@ -88,7 +88,7 @@ public class PRBasicIndexCreationDUnitTest extends
// Scope.DISTRIBUTED_ACK, redundancy));
// Creating the Datastores Nodes in the VM1.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("PRBasicIndexCreationDUnitTest : creating all the prs ");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
@@ -112,7 +112,7 @@ public class PRBasicIndexCreationDUnitTest extends
name, "PrIndexOnStatus", "p.status",null, "p"));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForDuplicatePRIndexCreate(
name, "PrIndexOnStatus", "p.status",null, "p"));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreationDUnitTest.testPRBasicIndexCreate is done ");
}
@@ -128,7 +128,7 @@ public class PRBasicIndexCreationDUnitTest extends
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testPRMultiIndexCreation Test Started");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
@@ -152,7 +152,7 @@ public class PRBasicIndexCreationDUnitTest extends
// will throw a RuntimeException.
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
"PrIndexOnID", "p.ID", null, "p"));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRQBasicIndexCreationTest.testPRMultiIndexCreation ENDED");
}
@@ -170,7 +170,7 @@ public class PRBasicIndexCreationDUnitTest extends
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testPRMultiIndexCreation Test Started");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
@@ -234,7 +234,7 @@ public class PRBasicIndexCreationDUnitTest extends
// Check getIndex() on datastore
vm1.invoke(getIndexCheck);
- LogWriterSupport.getLogWriter().info("PRQBasicIndexCreationTest.testPRMultiIndexCreation ENDED");
+ LogWriterUtils.getLogWriter().info("PRQBasicIndexCreationTest.testPRMultiIndexCreation ENDED");
}
/**
@@ -250,28 +250,28 @@ public class PRBasicIndexCreationDUnitTest extends
// VM vm3 = host.getVM(3);
// closeAllCache();
final String fileName = "PRIndexCreation.xml";
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testCreatePartitionedIndexThroughXML started");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Starting and initializing partitioned regions and indexes using xml");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Starting a pr asynchronously using an xml file name : " + fileName);
AsyncInvocation asyInvk0 = vm0.invokeAsync(PRQHelp
.getCacheSerializableRunnableForPRCreateThrougXML(name, fileName));
AsyncInvocation asyInvk1 = vm1.invokeAsync(PRQHelp
.getCacheSerializableRunnableForPRCreateThrougXML(name, fileName));
- Threads.join(asyInvk1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk1, 30 * 1000);
if (asyInvk1.exceptionOccurred()) {
Assert.fail("asyInvk1 failed", asyInvk1.getException());
}
- Threads.join(asyInvk0, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 30 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
// printing all the indexes are created.
vm0.invoke(PRQHelp.getCacheSerializableRunnableForIndexCreationCheck(name));
vm1.invoke(PRQHelp.getCacheSerializableRunnableForIndexCreationCheck(name));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testCreatePartitionedIndexThroughXML is done " );
}
@@ -293,7 +293,7 @@ public class PRBasicIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testCreatePartitionedRegionThroughXMLAndAPI started ");
// creating all the prs
@@ -341,7 +341,7 @@ public class PRBasicIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testCreatePartitionedIndexWithNoAliasAfterPuts started ");
// creating all the prs
@@ -387,7 +387,7 @@ public class PRBasicIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testCreatePartitionedIndexWithNoAliasBeforePuts started ");
// creating all the prs
@@ -466,7 +466,7 @@ public class PRBasicIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexUsageWithPRQuery started ");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -494,7 +494,7 @@ public class PRBasicIndexCreationDUnitTest extends
vm1.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
vm2.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexUsageWithPRQuery done ");
}
@@ -514,7 +514,7 @@ public class PRBasicIndexCreationDUnitTest extends
// fileName));
int redundancy = 1;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexCreationDuringPersistentRecovery started ");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPersistentPRCreate(name,
@@ -553,7 +553,7 @@ public class PRBasicIndexCreationDUnitTest extends
//The indexes may not have been completely created yet, because the buckets
//may still be recovering from disk.
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexCreationDuringPersistentRecovery done ");
}
@@ -571,7 +571,7 @@ public class PRBasicIndexCreationDUnitTest extends
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedQueryWithIndexOnIdBug37089 started ");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -595,7 +595,7 @@ public class PRBasicIndexCreationDUnitTest extends
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
// validation on index usage with queries over a pr
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedQueryWithIndexOnIdBug37089 done ");
}
@@ -613,11 +613,11 @@ public class PRBasicIndexCreationDUnitTest extends
// VM vm3 = host.getVM(3);
// closeAllCache();
final String fileName = "PRIndexCreation.xml";
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testCreatePartitionedIndexThroughXML started");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Starting and initializing partitioned regions and indexes using xml");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Starting a pr asynchronously using an xml file name : " + fileName);
// AsyncInvocation asyInvk0 = vm0.invokeAsync(PRQHelp
// .getCacheSerializableRunnableForPRCreateThrougXML(name, fileName));
@@ -663,7 +663,7 @@ public class PRBasicIndexCreationDUnitTest extends
cnt, cntDest));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testCreatePartitionedIndexThroughXML is done " );
@@ -757,7 +757,7 @@ public class PRBasicIndexCreationDUnitTest extends
int totalDataSize = 90;
final int i = 0;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -769,7 +769,7 @@ public class PRBasicIndexCreationDUnitTest extends
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -780,17 +780,17 @@ public class PRBasicIndexCreationDUnitTest extends
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -808,7 +808,7 @@ public class PRBasicIndexCreationDUnitTest extends
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -826,14 +826,14 @@ public class PRBasicIndexCreationDUnitTest extends
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
"PrIndexOnKeyID", "key.ID","/" + name + ".keys key", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -855,7 +855,7 @@ public class PRBasicIndexCreationDUnitTest extends
int totalDataSize = 90;
final int i = 0;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -867,7 +867,7 @@ public class PRBasicIndexCreationDUnitTest extends
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -878,17 +878,17 @@ public class PRBasicIndexCreationDUnitTest extends
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -906,7 +906,7 @@ public class PRBasicIndexCreationDUnitTest extends
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -924,14 +924,14 @@ public class PRBasicIndexCreationDUnitTest extends
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name,
"PrIndexOnKeyID", "key.ID","/" + name + ".keys key", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryAndVerifyOrder(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -943,7 +943,7 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
int totalDataSize = 90;
final int i = 0;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -955,7 +955,7 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -966,17 +966,17 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -994,7 +994,7 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -1034,14 +1034,14 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
"rrIndexOnKeyStatus", "key.status", "/" + localName + ".keys key", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryWithLimit(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDeadlockDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDeadlockDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDeadlockDUnitTest.java
index 5889c1f..b09eb31 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDeadlockDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicIndexCreationDeadlockDUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -186,7 +186,7 @@ public class PRBasicIndexCreationDeadlockDUnitTest extends
});
for (AsyncInvocation async: asyns) {
- Threads.join(async, 10000, null);
+ ThreadUtils.join(async, 10000);
}
} finally {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicMultiIndexCreationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicMultiIndexCreationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicMultiIndexCreationDUnitTest.java
index 1a3961b..d020ef6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicMultiIndexCreationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicMultiIndexCreationDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -74,7 +74,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreationDUnitTest.testPRBasicIndexCreate started ....");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
@@ -84,7 +84,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
// Scope.DISTRIBUTED_ACK, redundancy));
// Creating the Datastores Nodes in the VM1.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("PRBasicIndexCreationDUnitTest : creating all the prs ");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
@@ -116,7 +116,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
name, "PrIndexOnStatus", "p.status",null, "p"));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForDuplicatePRIndexCreate(
name, "PrIndexOnStatus", "p.status",null, "p"));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreationDUnitTest.testPRBasicIndexCreate is done ");
}
@@ -135,7 +135,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testPRMultiIndexCreation Test Started");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
@@ -203,7 +203,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
vm2.invoke(getIndexCheck);
vm3.invoke(getIndexCheck);
- LogWriterSupport.getLogWriter().info("PRQBasicIndexCreationTest.testPRMultiIndexCreation ENDED");
+ LogWriterUtils.getLogWriter().info("PRQBasicIndexCreationTest.testPRMultiIndexCreation ENDED");
}
@@ -225,7 +225,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testCreatePartitionedRegionThroughXMLAndAPI started ");
// creating all the prs
@@ -279,7 +279,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testCreatePartitionedIndexWithNoAliasAfterPuts started ");
// creating all the prs
@@ -336,7 +336,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testCreatePartitionedIndexWithNoAliasBeforePuts started ");
// creating all the prs
@@ -426,7 +426,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
// final String fileName = "PRIndexCreation.xml";
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreateThrougXML(name,
// fileName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexUsageWithPRQuery started ");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -460,7 +460,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
vm1.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
vm2.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexUsageWithPRQuery done ");
}
@@ -480,7 +480,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
// fileName));
int redundancy = 1;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexCreationDuringPersistentRecovery started ");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPersistentPRCreate(name,
@@ -524,7 +524,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
//The indexes may not have been completely created yet, because the buckets
//may still be recovering from disk.
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForIndexUsageCheck(name));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedIndexCreationDuringPersistentRecovery done ");
}
@@ -542,7 +542,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedQueryWithIndexOnIdBug37089 started ");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -572,7 +572,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
// validation on index usage with queries over a pr
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRBasicIndexCreationDUnitTest.testPartitionedQueryWithIndexOnIdBug37089 done ");
}
@@ -590,11 +590,11 @@ public class PRBasicMultiIndexCreationDUnitTest extends
// VM vm3 = host.getVM(3);
// closeAllCache();
final String fileName = "PRIndexCreation.xml";
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testCreatePartitionedIndexThroughXML started");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Starting and initializing partitioned regions and indexes using xml");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Starting a pr asynchronously using an xml file name : " + fileName);
// AsyncInvocation asyInvk0 = vm0.invokeAsync(PRQHelp
// .getCacheSerializableRunnableForPRCreateThrougXML(name, fileName));
@@ -649,7 +649,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
cnt, cntDest));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicIndexCreation.testCreatePartitionedIndexThroughXML is done " );
@@ -750,7 +750,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
int totalDataSize = 90;
final int i = 0;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -762,7 +762,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -773,17 +773,17 @@ public class PRBasicMultiIndexCreationDUnitTest extends
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -801,7 +801,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -822,14 +822,14 @@ public class PRBasicMultiIndexCreationDUnitTest extends
vm1.invoke(PRQHelp.getCacheSerializableRunnableForDefineIndex(name, names, exps));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -851,7 +851,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
int totalDataSize = 90;
final int i = 0;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -863,7 +863,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -874,17 +874,17 @@ public class PRBasicMultiIndexCreationDUnitTest extends
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -902,7 +902,7 @@ public class PRBasicMultiIndexCreationDUnitTest extends
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -923,14 +923,14 @@ public class PRBasicMultiIndexCreationDUnitTest extends
vm1.invoke(PRQHelp.getCacheSerializableRunnableForDefineIndex(name, names, exps));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryAndVerifyOrder(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -942,7 +942,7 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
int totalDataSize = 90;
final int i = 0;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -954,7 +954,7 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -965,17 +965,17 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -993,7 +993,7 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -1049,14 +1049,14 @@ public void testIndexQueryingWithOrderByLimit() throws Exception
vm0.invoke(PRQHelp.getCacheSerializableRunnableForDefineIndex(localName, names2, exps2, fromClause2));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryWithLimit(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicQueryDUnitTest.java
index fd09ab1..147ed4d 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicQueryDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.cache.query.data.PortfolioData;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -77,13 +77,13 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating Accessor node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Accessor node in the PR");
@@ -93,22 +93,22 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(localName,
// Scope.DISTRIBUTED_ACK, redundancy));
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Accessor node in the PR");
// Creating the Datastores Nodes in the VM1.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest:testPRBasicQuerying ----- Creating the Datastore node in the PR");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created the Datastore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -122,7 +122,7 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRDuplicatePuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
@@ -136,7 +136,7 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -148,13 +148,13 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRCountStarQuery: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating Accessor node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRCountStarQuery: Creating the Accessor node in the PR");
@@ -164,12 +164,12 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
// vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(localName,
// Scope.DISTRIBUTED_ACK, redundancy));
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRCountStarQuery: Successfully created the Accessor node in the PR");
// Creating the Datastores Nodes in the VM1.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest:testPRCountStarQuery ----- Creating the Datastore node in the PR");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -179,11 +179,11 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, Portfolio.class));
vm2.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(localName, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRCountStarQuery: Successfully Created the Datastore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRCountStarQuery: Successfully Created PR's across all VM's");
@@ -197,7 +197,7 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRDuplicatePuts(name, portfolio,
cnt, cntDest+100));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRCountStarQuery: Inserted Portfolio data across PR's");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
@@ -217,7 +217,7 @@ public class PRBasicQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCountStarQueries(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRCountStarQuery: Querying PR's Test ENDED");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicRemoveIndexDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicRemoveIndexDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicRemoveIndexDUnitTest.java
index f27b66d..81ee3bf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicRemoveIndexDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRBasicRemoveIndexDUnitTest.java
@@ -19,7 +19,7 @@ package com.gemstone.gemfire.cache.query.partitioned;
import com.gemstone.gemfire.cache.query.data.PortfolioData;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -69,7 +69,7 @@ public class PRBasicRemoveIndexDUnitTest extends PartitionedRegionDUnitTestCase
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicRemoveIndexDUnitTest.testPRBasicIndexCreate test now starts ....");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
@@ -97,7 +97,7 @@ public class PRBasicRemoveIndexDUnitTest extends PartitionedRegionDUnitTestCase
//remove indexes
vm1.invoke(PRQHelp.getCacheSerializableRunnableForRemoveIndex(name, false));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicRemoveIndexDUnitTest.testPRBasicRemoveIndex test now ends sucessfully");
}
@@ -112,7 +112,7 @@ public class PRBasicRemoveIndexDUnitTest extends PartitionedRegionDUnitTestCase
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRBasicRemoveIndexDUnitTest.testPRBasicIndexCreate test now starts ....");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
[20/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionWithSameNameDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionWithSameNameDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionWithSameNameDUnitTest.java
index 807a91f..d8eaf9c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionWithSameNameDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionWithSameNameDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -108,7 +108,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testNameWithPartitionRegionFirstOnSameVM() - Partition Regions successfully created ");
// creating distributed region on same vm with same name as previouslu
@@ -119,7 +119,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
vmList = addNodeToList(startIndexForNode, endIndexForNode);
createDistributedRegion(vmList, startIndexForRegion, endIndexForRegion,
Scope.DISTRIBUTED_ACK, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testNameWithPartitionRegionFirstOnSameVM() - test completed successfully ");
}
@@ -160,7 +160,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testNameWithPartitionRegionFirstOnSameVM() - test completed successfully ");
}
@@ -197,7 +197,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testNameWithPartitionRegionFirstOnDifferentVM() - Partition Regions successfully created ");
// creating distrubuted region with the scope = DISTRIBUTED_ACK on
@@ -210,7 +210,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
vmList = addNodeToList(startIndexForNode, endIndexForNode);
createDistributedRegion(vmList, startIndexForRegion, endIndexForRegion,
Scope.DISTRIBUTED_ACK, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testNameWithPartitionRegionFirstOnDifferentVM() - test completed successfully ");
}
@@ -252,7 +252,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testNameWithLocalRegionFirstOnDifferentVM() - test completed successfully ");
}
@@ -293,7 +293,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionRegionVsLocalRegionFirst() - test completed successfully ");
}
@@ -335,7 +335,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createDistributedRegion(vmList, startIndexForRegion, endIndexForRegion,
Scope.LOCAL, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionRegionVsLocalRegionSecond() - test completed successfully ");
}
@@ -367,12 +367,12 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testWithPartitionedRegionAsParentRegionAndDistributedSubRegion() - Parent region as partitioned region is created ");
// create subregion of partition region
createSubRegionOfPartitionedRegion(vmList, DISTRIBUTED_REGION);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testWithPartitionedRegionAsParentRegionAndDistributedSubRegion() completed Successfully ");
}
@@ -405,12 +405,12 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testWithPartitionedRegionAsParentRegionAndPartitionedSubRegion() - Parent region as partitioned region is created ");
// create subregion of partition region
createSubRegionOfPartitionedRegion(vmList, PARTITIONED_REGION);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testWithPartitionedRegionAsParentRegionAndPartitionedSubRegion() completed Successfully ");
}
@@ -442,7 +442,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createDistributedRegion(vmList, startIndexForRegion, endIndexForRegion,
Scope.DISTRIBUTED_ACK, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testWithSubRegionPartitionedRegionFirst() - Parent region is created");
// creating distributed region as subregion of parent on vm0
prPrefix = "child_region";
@@ -450,7 +450,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
endIndexForNode = 1;
vmList = addNodeToList(startIndexForNode, endIndexForNode);
createPartitionedSubRegion(vmList, firstCreationFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testWithSubRegionPartitionedRegionFirst() - Partitioned sub region on vm0 ");
// creating partiton region as subregion of parent region with the same name
@@ -459,7 +459,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
endIndexForNode = 4;
vmList = addNodeToList(startIndexForNode, endIndexForNode);
createDistributedSubRegion(vmList, firstCreationFlag);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testWithSubRegionPartitionedRegionFirst() completed successfully ");
}
@@ -491,7 +491,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
firstCreationFlag = true;
createDistributedRegion(vmList, startIndexForRegion, endIndexForRegion,
Scope.DISTRIBUTED_ACK, firstCreationFlag, multipleVMFlag);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testWithSubRegionDistributedRegionFirst() - Parent region is created");
// creating distributed region as subregion of parent on vm0
prPrefix = "child_region";
@@ -499,7 +499,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
endIndexForNode = 1;
vmList = addNodeToList(startIndexForNode, endIndexForNode);
createDistributedSubRegion(vmList, firstCreationFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testWithSubRegionDistributedRegionFirst() - Distributed sub region on vm0 ");
// creating partiton region as subregion of parent region with the same name
@@ -508,7 +508,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
endIndexForNode = 4;
vmList = addNodeToList(startIndexForNode, endIndexForNode);
createPartitionedSubRegion(vmList, firstCreationFlag);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testWithSubRegionDistributedRegionFirst() completed successfully ");
}
@@ -619,7 +619,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
case DISTRIBUTED_REGION: {
Region childRegion = parentRegion.createSubregion("child_region",
ra);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Subregion is created as : "
+ childRegion.getName());
}
@@ -627,7 +627,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
case PARTITIONED_REGION: {
Region childRegion = parentRegion.createSubregion("child_region",
createRegionAttrsForPR(0, 200));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Subregion is created as : "
+ childRegion.getName());
@@ -881,7 +881,7 @@ public class PartitionedRegionWithSameNameDUnitTest extends
}
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"createMultiplePartitionRegion() - Partition Regions Successfully Completed ");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllDAckDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllDAckDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllDAckDUnitTest.java
index 278be8b..495ad0e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllDAckDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllDAckDUnitTest.java
@@ -41,7 +41,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -73,7 +73,7 @@ public class PutAllDAckDUnitTest extends DistributedTestCase {
VM vm1 = host.getVM(1);
vm0.invoke(PutAllDAckDUnitTest.class, "createCacheForVM0");
vm1.invoke(PutAllDAckDUnitTest.class, "createCacheForVM1");
- LogWriterSupport.getLogWriter().fine("Cache created successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created successfully");
}
public void preTearDown(){
@@ -216,7 +216,7 @@ public class PutAllDAckDUnitTest extends DistributedTestCase {
// }
beforeCreateputAllcounter++;
- LogWriterSupport.getLogWriter().fine("*******BeforeCreate*****");
+ LogWriterUtils.getLogWriter().fine("*******BeforeCreate*****");
beforeCreate = true;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllGlobalDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllGlobalDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllGlobalDUnitTest.java
index 719024e..72e5439 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllGlobalDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PutAllGlobalDUnitTest.java
@@ -49,9 +49,9 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -89,7 +89,7 @@ public class PutAllGlobalDUnitTest extends DistributedTestCase {
VM vm1 = host.getVM(1);
vm0.invoke(PutAllGlobalDUnitTest.class, "createCacheForVM0");
vm1.invoke(PutAllGlobalDUnitTest.class, "createCacheForVM1");
- LogWriterSupport.getLogWriter().fine("Cache created successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created successfully");
}
public void preTearDown(){
@@ -185,16 +185,16 @@ public class PutAllGlobalDUnitTest extends DistributedTestCase {
long startTime = 0;
try{
Thread.sleep(500);
- LogWriterSupport.getLogWriter().info("async2 proceeding with put operation");
+ LogWriterUtils.getLogWriter().info("async2 proceeding with put operation");
startTime = System.currentTimeMillis();
region.put(new Integer(1),"mapVal");
- LogWriterSupport.getLogWriter().info("async2 done with put operation");
+ LogWriterUtils.getLogWriter().info("async2 done with put operation");
fail("Should have thrown TimeoutException");
}catch(TimeoutException Tx){
// Tx.printStackTrace();
- LogWriterSupport.getLogWriter().info("PASS: As expected Caught TimeoutException ");
+ LogWriterUtils.getLogWriter().info("PASS: As expected Caught TimeoutException ");
if (startTime + TIMEOUT_PERIOD + DLockGrantor.GRANTOR_THREAD_MAX_WAIT /* slop of grantor max wait ms */ < System.currentTimeMillis()) {
- LogWriterSupport.getLogWriter().warning("though this test passed, the put() timed out in "
+ LogWriterUtils.getLogWriter().warning("though this test passed, the put() timed out in "
+ (System.currentTimeMillis() - startTime) +
" instead of the expected " + TIMEOUT_PERIOD + " milliseconds");
}
@@ -206,13 +206,13 @@ public class PutAllGlobalDUnitTest extends DistributedTestCase {
}
});
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async2, 30 * 1000);
if (async2.exceptionOccurred()) {
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
Assert.fail("async2 failed", async2.getException());
}
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
if (async1.exceptionOccurred()) {
Assert.fail("async1 failed", async1.getException());
}
@@ -224,14 +224,14 @@ public class PutAllGlobalDUnitTest extends DistributedTestCase {
public static void putAllMethod() throws Exception {
Map m = new HashMap();
serverSocket.accept();
- LogWriterSupport.getLogWriter().info("async1 connection received - continuing with putAll operation");
+ LogWriterUtils.getLogWriter().info("async1 connection received - continuing with putAll operation");
serverSocket.close();
try{
for (int i=1; i<2; i++) {
m.put(new Integer(i), String.valueOf(i));
}
region.putAll(m);
- LogWriterSupport.getLogWriter().info("async1 done with putAll operation");
+ LogWriterUtils.getLogWriter().info("async1 done with putAll operation");
}catch(Exception ex){
// ex.printStackTrace();
@@ -280,13 +280,13 @@ public class PutAllGlobalDUnitTest extends DistributedTestCase {
static class BeforeCreateCallback extends CacheWriterAdapter {
public void beforeCreate(EntryEvent event){
- LogWriterSupport.getLogWriter().info("beforeCreate invoked for " + event.getKey());
+ LogWriterUtils.getLogWriter().info("beforeCreate invoked for " + event.getKey());
try{
Thread.sleep(5000);
}catch(InterruptedException ex) {
fail("interrupted");
}
- LogWriterSupport.getLogWriter().info("beforeCreate done for " + event.getKey());
+ LogWriterUtils.getLogWriter().info("beforeCreate done for " + event.getKey());
}
}// end of BeforeCreateCallback
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoteTransactionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoteTransactionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoteTransactionDUnitTest.java
index d1b3b5d..81b2a73 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoteTransactionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoteTransactionDUnitTest.java
@@ -99,7 +99,7 @@ import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -253,7 +253,7 @@ public class RemoteTransactionDUnitTest extends CacheTestCase {
}
public Object call() throws Exception {
CacheTransactionManager mgr = getGemfireCache().getTxManager();
- LogWriterSupport.getLogWriter().fine("testTXPut starting tx");
+ LogWriterUtils.getLogWriter().fine("testTXPut starting tx");
mgr.begin();
Region<CustId, Customer> custRegion = getCache().getRegion(CUSTOMER);
Region<OrderId, Order> orderRegion = getCache().getRegion(ORDER);
@@ -2539,7 +2539,7 @@ public class RemoteTransactionDUnitTest extends CacheTestCase {
PartitionedRegion pr = (PartitionedRegion)getGemfireCache().getRegion(CUSTOMER);
Set filter = new HashSet();
filter.add(expectedCustId);
- LogWriterSupport.getLogWriter().info("SWAP:inside NestedTxFunc calling func2:");
+ LogWriterUtils.getLogWriter().info("SWAP:inside NestedTxFunc calling func2:");
r.put(expectedCustId, expectedCustomer);
FunctionService.onRegion(pr).withFilter(filter).execute(new NestedTxFunction2()).getResult();
assertNotNull(getGemfireCache().getTxManager().getTXState());
@@ -3465,7 +3465,7 @@ public class RemoteTransactionDUnitTest extends CacheTestCase {
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port);
ccf.setPoolSubscriptionEnabled(true);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<Integer, String> crf = cCache
.createClientRegionFactory(isEmpty ? ClientRegionShortcut.PROXY
@@ -3677,7 +3677,7 @@ protected static class ClientListener extends CacheListenerAdapter {
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port);
ccf.setPoolSubscriptionEnabled(true);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<Integer, String> crf = cCache
.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
@@ -3955,12 +3955,12 @@ protected static class ClientListener extends CacheListenerAdapter {
private int count;
@Override
public void afterCreate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("afterCreate invoked for " + event);
+ LogWriterUtils.getLogWriter().info("afterCreate invoked for " + event);
count++;
}
@Override
public void afterUpdate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("afterUpdate invoked for " + event);
+ LogWriterUtils.getLogWriter().info("afterUpdate invoked for " + event);
count++;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveAllDAckDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveAllDAckDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveAllDAckDUnitTest.java
index 9c0ef03..f4ab8a1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveAllDAckDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveAllDAckDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -70,7 +70,7 @@ public class RemoveAllDAckDUnitTest extends DistributedTestCase {
VM vm1 = host.getVM(1);
vm0.invoke(RemoveAllDAckDUnitTest.class, "createCacheForVM0");
vm1.invoke(RemoveAllDAckDUnitTest.class, "createCacheForVM1");
- LogWriterSupport.getLogWriter().fine("Cache created successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created successfully");
}
public void preTearDown(){
@@ -161,7 +161,7 @@ public class RemoveAllDAckDUnitTest extends DistributedTestCase {
beforeDestroyRemoveAllcounter++;
assertEquals(true, event.getOperation().isRemoveAll());
assertEquals("removeAllCallback", event.getCallbackArgument());
- LogWriterSupport.getLogWriter().fine("*******BeforeDestroy*****");
+ LogWriterUtils.getLogWriter().fine("*******BeforeDestroy*****");
beforeDestroy = true;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveDAckDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveDAckDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveDAckDUnitTest.java
index 38d7ca3..6d5b9d6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveDAckDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveDAckDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -66,7 +66,7 @@ public class RemoveDAckDUnitTest extends DistributedTestCase {
VM vm1 = host.getVM(1);
vm0.invoke(RemoveDAckDUnitTest.class, "createCacheVM0");
vm1.invoke(RemoveDAckDUnitTest.class, "createCacheVM1");
- LogWriterSupport.getLogWriter().fine("Cache created in successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created in successfully");
}
public void preTearDown(){
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveGlobalDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveGlobalDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveGlobalDUnitTest.java
index 72d5a06..b4907ae 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveGlobalDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/RemoveGlobalDUnitTest.java
@@ -39,9 +39,9 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -152,7 +152,7 @@ public class RemoveGlobalDUnitTest extends DistributedTestCase {
}
});
- Threads.join(async, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
if(async.exceptionOccurred())
throw async.getException();
@@ -225,7 +225,7 @@ public class RemoveGlobalDUnitTest extends DistributedTestCase {
}
});
- Threads.join(async, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
if(async.exceptionOccurred())
throw async.getException();
@@ -243,7 +243,7 @@ public class RemoveGlobalDUnitTest extends DistributedTestCase {
}catch(InterruptedException ex){
fail("interrupted");
}
- LogWriterSupport.getLogWriter().fine("quitingfromcachewriter");
+ LogWriterUtils.getLogWriter().fine("quitingfromcachewriter");
}
}///////////
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SimpleDiskRegionJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SimpleDiskRegionJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SimpleDiskRegionJUnitTest.java
index bc2e3bf..2f14119 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SimpleDiskRegionJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SimpleDiskRegionJUnitTest.java
@@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
import com.gemstone.gemfire.StatisticsFactory;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -363,11 +363,11 @@ public class SimpleDiskRegionJUnitTest extends DiskRegionTestingBase
thread4.start();
thread5.start();
- Threads.join(thread1, 30 * 1000, null);
- Threads.join(thread2, 30 * 1000, null);
- Threads.join(thread3, 30 * 1000, null);
- Threads.join(thread4, 30 * 1000, null);
- Threads.join(thread5, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000);
+ ThreadUtils.join(thread2, 30 * 1000);
+ ThreadUtils.join(thread3, 30 * 1000);
+ ThreadUtils.join(thread4, 30 * 1000);
+ ThreadUtils.join(thread5, 30 * 1000);
if (keyIds.size() != 50000) {
fail("Size not equal to 5000 as expected but is " + keyIds.size());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SingleHopStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SingleHopStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SingleHopStatsDUnitTest.java
index dabda5e..4b016a4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SingleHopStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SingleHopStatsDUnitTest.java
@@ -42,10 +42,10 @@ import com.gemstone.gemfire.internal.cache.execute.data.OrderId;
import com.gemstone.gemfire.internal.cache.execute.data.ShipmentId;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -133,7 +133,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
}
finally {
- DistributedTestSupport.unregisterAllDataSerializersFromAllVms();
+ DistributedTestUtils.unregisterAllDataSerializersFromAllVms();
}
}
@@ -256,7 +256,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
attr.setDataPolicy(DataPolicy.REPLICATE);
region = cache.createRegion(Region_Name, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + Region_Name + " created Successfully :"
+ region.toString());
}else{
@@ -267,7 +267,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(Region_Name, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + Region_Name + " created Successfully :"
+ region.toString());
}
@@ -281,7 +281,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
attr.setPartitionAttributes(paf.create());
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -293,7 +293,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
attr.setPartitionAttributes(paf.create());
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -305,7 +305,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
attr.setPartitionAttributes(paf.create());
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
}
@@ -320,7 +320,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
RegionAttributes attrs = factory.create();
region = cache.createRegion(Region_Name, attrs);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Region " + Region_Name + " created Successfully :" + region.toString());
}
else {
@@ -329,7 +329,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
RegionAttributes attrs = factory.create();
customerRegion = cache.createRegion("CUSTOMER", attrs);
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -338,7 +338,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
attrs = factory.create();
orderRegion = cache.createRegion("ORDER", attrs);
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -347,7 +347,7 @@ public class SingleHopStatsDUnitTest extends CacheTestCase{
attrs = factory.create();
shipmentRegion = cache.createRegion("SHIPMENT", attrs);
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SystemFailureDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SystemFailureDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SystemFailureDUnitTest.java
index a1dc855..e47892e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SystemFailureDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/SystemFailureDUnitTest.java
@@ -56,7 +56,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
///////// Public test methods
public void testNullFailure() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("TODO: this test needs to use VM#bounce.");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("TODO: this test needs to use VM#bounce.");
try {
SystemFailure.initiateFailure(null);
fail("Null failure set allowed");
@@ -308,7 +308,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
static private final Runnable listener1 = new Runnable() {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Inside of preListener1");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Inside of preListener1");
listenerCount.addAndGet(1);
}
};
@@ -495,7 +495,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
protected static void message(String s) {
System.out.println(s);
System.err.println(s);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(s);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(s);
cache.getLogger().info(s);
}
@@ -545,7 +545,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
}
}
public void afterCreate(EntryEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Invoking afterCreate on listener; name=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Invoking afterCreate on listener; name=" +
event.getKey());
forceOutOfMemory();
}
@@ -590,7 +590,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
}
}
public void afterCreate(EntryEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Invoking afterCreate on listener; name=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Invoking afterCreate on listener; name=" +
event.getKey());
forceOutOfMemory();
}
@@ -660,7 +660,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
}
public void afterCreate(EntryEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Invoking afterCreate on listener; name=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Invoking afterCreate on listener; name=" +
event.getKey());
forceLowMemory();
}
@@ -676,7 +676,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
}
public void afterCreate(EntryEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Invoking afterCreate on listener; name=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Invoking afterCreate on listener; name=" +
event.getKey());
forceInternalError();
}
@@ -692,7 +692,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
}
public void afterCreate(EntryEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Invoking afterCreate on listener; name=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Invoking afterCreate on listener; name=" +
event.getKey());
forceInternalError();
}
@@ -704,7 +704,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
}
public void afterCreate(EntryEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Invoking afterCreate on listener; name=" +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Invoking afterCreate on listener; name=" +
event.getKey());
forceError();
}
@@ -756,7 +756,7 @@ public class SystemFailureDUnitTest extends DistributedCacheTestCase {
}
protected void doCreateEntry(String name) {
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
log.info(
"<ExpectedException action=add>" +
"dunit.RMIException"
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TXReservationMgrJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TXReservationMgrJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TXReservationMgrJUnitTest.java
index ef5b9ef..b1d1e64 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TXReservationMgrJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TXReservationMgrJUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.CommitConflictException;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.distributed.DistributedSystem;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@Category(IntegrationTest.class)
@@ -124,7 +124,7 @@ public class TXReservationMgrJUnitTest {
threads[i].start();
}
for (int i=0; i < THREAD_COUNT; i++) {
- Threads.join(threads[i], 60 * 1000, null); // increased from 30 to 60 for parallel junit runs
+ ThreadUtils.join(threads[i], 60 * 1000); // increased from 30 to 60 for parallel junit runs
}
int invalidCount = 0;
for (int i=0; i < KEY_COUNT; i++) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TransactionsWithDeltaDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TransactionsWithDeltaDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TransactionsWithDeltaDUnitTest.java
index a0549a4..fdb7ada 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TransactionsWithDeltaDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/TransactionsWithDeltaDUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.internal.cache.execute.data.CustId;
import com.gemstone.gemfire.internal.cache.execute.data.Order;
import com.gemstone.gemfire.internal.cache.execute.data.OrderId;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -115,7 +115,7 @@ public class TransactionsWithDeltaDUnitTest extends CacheTestCase {
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<Integer, String> crf = cCache
.createClientRegionFactory(isEmpty ? ClientRegionShortcut.PROXY
@@ -331,7 +331,7 @@ public class TransactionsWithDeltaDUnitTest extends CacheTestCase {
pr.put(cust1, new Customer(1, "name1"));
Iterator<CustId> it = pr.keySet().iterator();
while (it.hasNext()) {
- LogWriterSupport.getLogWriter().info("SWAP:iterator1:"+pr.get(it.next()));
+ LogWriterUtils.getLogWriter().info("SWAP:iterator1:"+pr.get(it.next()));
}
Customer c = pr.get(cust1);
assertNotNull(c);
@@ -349,10 +349,10 @@ public class TransactionsWithDeltaDUnitTest extends CacheTestCase {
mgr.begin();
Customer c = pr.get(cust1);
c.setName("updatedName");
- LogWriterSupport.getLogWriter().info("SWAP:doingPut");
+ LogWriterUtils.getLogWriter().info("SWAP:doingPut");
pr.put(cust1, c);
- LogWriterSupport.getLogWriter().info("SWAP:getfromtx:"+pr.get(cust1));
- LogWriterSupport.getLogWriter().info("SWAP:doingCommit");
+ LogWriterUtils.getLogWriter().info("SWAP:getfromtx:"+pr.get(cust1));
+ LogWriterUtils.getLogWriter().info("SWAP:doingCommit");
assertEquals("updatedName", pr.get(cust1).getName());
TXStateProxy tx = mgr.internalSuspend();
assertEquals("name1", pr.get(cust1).getName());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/control/RebalanceOperationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/control/RebalanceOperationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/control/RebalanceOperationDUnitTest.java
index aa58ef8..6c2cc98 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/control/RebalanceOperationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/control/RebalanceOperationDUnitTest.java
@@ -77,7 +77,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -1869,7 +1869,7 @@ public class RebalanceOperationDUnitTest extends CacheTestCase {
assertEquals(12, details.getCreatedBucketCount());
assertEquals(1,details.getActualRedundantCopies());
assertEquals(0,details.getLowRedundancyBucketCount());
- LogWriterSupport.getLogWriter().info("details=" + details.getPartitionMemberInfo());
+ LogWriterUtils.getLogWriter().info("details=" + details.getPartitionMemberInfo());
long afterSize = 0;
for(PartitionMemberInfo memberDetails: details.getPartitionMemberInfo()) {
assertEquals(8, memberDetails.getBucketCount());
@@ -2011,7 +2011,7 @@ public class RebalanceOperationDUnitTest extends CacheTestCase {
assertEquals(12, details.getCreatedBucketCount());
assertEquals(1,details.getActualRedundantCopies());
assertEquals(0,details.getLowRedundancyBucketCount());
- LogWriterSupport.getLogWriter().info("details=" + details.getPartitionMemberInfo());
+ LogWriterUtils.getLogWriter().info("details=" + details.getPartitionMemberInfo());
long afterSize = 0;
for(PartitionMemberInfo memberDetails: details.getPartitionMemberInfo()) {
assertEquals(8, memberDetails.getBucketCount());
@@ -2080,7 +2080,7 @@ public class RebalanceOperationDUnitTest extends CacheTestCase {
assertEquals(12, details.getCreatedBucketCount());
assertEquals(1,details.getActualRedundantCopies());
assertEquals(0,details.getLowRedundancyBucketCount());
- LogWriterSupport.getLogWriter().info("details=" + details.getPartitionMemberInfo());
+ LogWriterUtils.getLogWriter().info("details=" + details.getPartitionMemberInfo());
long afterSize = 0;
for(PartitionMemberInfo memberDetails: details.getPartitionMemberInfo()) {
assertEquals(6, memberDetails.getBucketCount());
@@ -2195,7 +2195,7 @@ public class RebalanceOperationDUnitTest extends CacheTestCase {
assertEquals(12, details.getCreatedBucketCount());
assertEquals(1,details.getActualRedundantCopies());
assertEquals(0,details.getLowRedundancyBucketCount());
- LogWriterSupport.getLogWriter().info("details=" + details.getPartitionMemberInfo());
+ LogWriterUtils.getLogWriter().info("details=" + details.getPartitionMemberInfo());
long afterSize = 0;
for(PartitionMemberInfo memberDetails: details.getPartitionMemberInfo()) {
assertEquals(8, memberDetails.getBucketCount());
@@ -2505,7 +2505,7 @@ public class RebalanceOperationDUnitTest extends CacheTestCase {
}
cacheWriter.release();
- LogWriterSupport.getLogWriter().info("starting wait for rebalance. Will wait for " + MAX_WAIT + " seconds");
+ LogWriterUtils.getLogWriter().info("starting wait for rebalance. Will wait for " + MAX_WAIT + " seconds");
RebalanceResults results = rebalance.getResults(MAX_WAIT, TimeUnit.SECONDS);
assertEquals(2, results.getTotalBucketCreatesCompleted());
assertEquals(1, results.getTotalPrimaryTransfersCompleted());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/diskPerf/MultiThreadedOplogPerJUnitPerformanceTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/diskPerf/MultiThreadedOplogPerJUnitPerformanceTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/diskPerf/MultiThreadedOplogPerJUnitPerformanceTest.java
index 4d18cde..5e2da16 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/diskPerf/MultiThreadedOplogPerJUnitPerformanceTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/diskPerf/MultiThreadedOplogPerJUnitPerformanceTest.java
@@ -34,7 +34,7 @@ import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.cache.DiskStoreFactoryImpl;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@Category(IntegrationTest.class)
@@ -142,7 +142,7 @@ public class MultiThreadedOplogPerJUnitPerformanceTest
}
for (int i = 0; i < numberOfThreads; i++) {
- Threads.join(threads[i], 30 * 1000, null);
+ ThreadUtils.join(threads[i], 30 * 1000);
}
long totalPuts = ((long)numberOfIterations * numberOfKeysPerThread * numberOfThreads);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/Bug51193DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/Bug51193DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/Bug51193DUnitTest.java
index d89d6ce..0fe4bd0 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/Bug51193DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/Bug51193DUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.internal.cache.tier.ClientHandShake;
import com.gemstone.gemfire.internal.cache.tier.sockets.AcceptorImpl;
import com.gemstone.gemfire.internal.cache.tier.sockets.ServerConnection;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
@@ -118,7 +118,7 @@ public class Bug51193DUnitTest extends DistributedTestCase {
public static Integer createServerCache(Boolean createPR)
throws Exception {
Properties props = new Properties();
- props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
Bug51193DUnitTest test = new Bug51193DUnitTest("Bug51193DUnitTest");
DistributedSystem ds = test.getSystem(props);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ClientServerFunctionExecutionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ClientServerFunctionExecutionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ClientServerFunctionExecutionDUnitTest.java
index 6882106..3890498 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ClientServerFunctionExecutionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ClientServerFunctionExecutionDUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -102,7 +102,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
registerFunctionAtServer(function);
isByName = new Boolean(true);
toRegister = new Boolean(true);
- LogWriterSupport.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution", new Object[] { isByName, function, toRegister});
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
@@ -119,7 +119,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
registerFunctionAtServer(function);
isByName = new Boolean(true);
toRegister = new Boolean(true);
- LogWriterSupport.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution_SendException", new Object[] { isByName, function, toRegister});
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
@@ -138,7 +138,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
registerFunctionAtServer(function);
isByName = new Boolean(true);
toRegister = new Boolean(true);
- LogWriterSupport.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution_NoLastResult", new Object[] { isByName, function, toRegister});
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
@@ -154,7 +154,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
registerFunctionAtServer(function);
isByName = new Boolean(true);
toRegister = new Boolean(false);
- LogWriterSupport.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution", new Object[] { isByName, function, toRegister});
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
@@ -165,7 +165,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
*/
public void testServerExecution_byInlineFunction() {
createScenario();
- LogWriterSupport.getLogWriter().info("ClientServerFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution_Inline");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
@@ -178,7 +178,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
*/
public void testServerExecution_byInlineFunction_InvalidAttrbiutes() {
createScenario();
- LogWriterSupport.getLogWriter().info("ClientServerFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution_Inline_InvalidAttributes");
}
@@ -188,7 +188,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
*/
public void testBug40714() {
createScenario();
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"ClientServerFunctionExecutionDUnitTest#testBug40714 : Starting test");
@@ -258,7 +258,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation.");
}
}
@@ -273,7 +273,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
registerFunctionAtServer(function);
isByName = new Boolean(true);
toRegister = new Boolean(true);
- LogWriterSupport.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution", new Object[] { isByName, function, toRegister});
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
@@ -287,7 +287,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
registerFunctionAtServer(function);
isByName = new Boolean(true);
toRegister = new Boolean(false);
- LogWriterSupport.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
+ LogWriterUtils.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#testServerSingleKeyExecution_byName : Starting test");
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"serverExecution", new Object[] { isByName, function, toRegister});
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
@@ -322,7 +322,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"createProxyRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()) });
function = new TestFunction(true, TestFunction.TEST_FUNCTION_HA_SERVER);
registerFunctionAtServer(function);
@@ -360,7 +360,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"createProxyRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()) });
function = new TestFunction(true, TestFunction.TEST_FUNCTION_HA_SERVER);
registerFunctionAtServer(function);
@@ -400,7 +400,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
client.invoke(ClientServerFunctionExecutionDUnitTest.class,
"createProxyRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()) });
function = new TestFunction(true, TestFunction.TEST_FUNCTION_NONHA_SERVER);
registerFunctionAtServer(function);
@@ -431,7 +431,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
private void createScenario() {
- LogWriterSupport.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#createScenario : creating scenario");
+ LogWriterUtils.getLogWriter().info("ClientServerFFunctionExecutionDUnitTest#createScenario : creating scenario");
createClientServerScenarionWithoutRegion();
}
@@ -450,7 +450,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation");
}
@@ -470,7 +470,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operations");
}
}
@@ -507,7 +507,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation");
}
@@ -534,7 +534,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operations");
}
}
@@ -579,7 +579,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
String excuse;
public boolean done() {
int sz = pool.getConnectedServerCount();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Checking for the Live Servers : Expected : " + expectedLiveServers
+ " Available :" + sz);
if (sz == expectedLiveServers.intValue()) {
@@ -613,7 +613,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
assertEquals(retryRegionName, ((List)rs.getResult()).get(0));
} catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
return rs.getResult();
@@ -636,7 +636,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
} catch (Exception ex) {
if (!(ex instanceof ServerConnectivityException)) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
}
@@ -660,7 +660,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
} catch (Exception ex) {
if (!(ex instanceof ServerConnectivityException)) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation");
}
}
@@ -734,7 +734,7 @@ public class ClientServerFunctionExecutionDUnitTest extends PRClientServerTestBa
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation nn TRUE");
}
}
@@ -769,7 +769,7 @@ public static void serverExecution_Inline_InvalidAttributes() {
fail("Should have failed with Invalid attributes.");
}catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
assertTrue(ex.getMessage().contains(
"For Functions with isHA true, hasResult must also be true."));
}
@@ -798,7 +798,7 @@ public static void serverExecution_Inline_InvalidAttributes() {
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation");
}
@@ -823,7 +823,7 @@ public static void serverExecution_Inline_InvalidAttributes() {
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation");
}
@@ -851,7 +851,7 @@ public static void allServerExecution_SendException(Boolean isByName, Function f
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation");
}
@@ -877,7 +877,7 @@ public static void allServerExecution_SendException(Boolean isByName, Function f
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation");
}
@@ -932,7 +932,7 @@ public static void allServerExecution_SendException(Boolean isByName, Function f
}catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : " , ex);
+ LogWriterUtils.getLogWriter().info("Exception : " , ex);
fail("Test failed after the execute operation asdfasdfa ");
}
}
@@ -940,9 +940,9 @@ public static void allServerExecution_SendException(Boolean isByName, Function f
private static ResultCollector execute(Execution member,
Serializable args, Function function, Boolean isByName) throws Exception {
if (isByName.booleanValue()) {// by name
- LogWriterSupport.getLogWriter().info("The function name to execute : " + function.getId());
+ LogWriterUtils.getLogWriter().info("The function name to execute : " + function.getId());
Execution me = member.withArgs(args);
- LogWriterSupport.getLogWriter().info("The args passed : " + args);
+ LogWriterUtils.getLogWriter().info("The args passed : " + args);
return me.execute(function.getId());
}
else { // By Instance
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ColocationFailoverDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ColocationFailoverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ColocationFailoverDUnitTest.java
index 360c1ff..0d6dca5 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ColocationFailoverDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/ColocationFailoverDUnitTest.java
@@ -43,7 +43,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -216,7 +216,7 @@ public class ColocationFailoverDUnitTest extends DistributedTestCase {
protected static void dump() {
- final InternalLogWriter logger = LogWriterSupport.getLogWriter();
+ final InternalLogWriter logger = LogWriterUtils.getLogWriter();
((PartitionedRegion)customerPR).dumpAllBuckets(false);
((PartitionedRegion)orderPR).dumpAllBuckets(false);
((PartitionedRegion)shipmentPR).dumpAllBuckets(false);
@@ -431,7 +431,7 @@ public class ColocationFailoverDUnitTest extends DistributedTestCase {
if (partitionedRegionName.equals(customerPR_Name)) {
customerPR = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(customerPR);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + customerPR);
@@ -439,7 +439,7 @@ public class ColocationFailoverDUnitTest extends DistributedTestCase {
if (partitionedRegionName.equals(orderPR_Name)) {
orderPR = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(orderPR);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + orderPR);
@@ -448,7 +448,7 @@ public class ColocationFailoverDUnitTest extends DistributedTestCase {
if (partitionedRegionName.equals(shipmentPR_Name)) {
shipmentPR = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(shipmentPR);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + shipmentPR);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/DistributedRegionFunctionExecutionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/DistributedRegionFunctionExecutionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/DistributedRegionFunctionExecutionDUnitTest.java
index 0ec7b20..0624f0f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/DistributedRegionFunctionExecutionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/DistributedRegionFunctionExecutionDUnitTest.java
@@ -56,8 +56,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -537,7 +537,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
replicate1.invoke(DistributedRegionFunctionExecutionDUnitTest.class,
"disconnect");
- Threads.join(async[0], 50 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 50 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -693,7 +693,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
"startServerHA");
emptyServer1.invoke(DistributedRegionFunctionExecutionDUnitTest.class,
"closeCacheHA");
- Threads.join(async[0], 4 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 4 * 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
}
@@ -1138,7 +1138,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
ds.disconnect();
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception Occured : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : " + e.getMessage());
e.printStackTrace();
Assert.fail("Test failed", e);
}
@@ -1418,7 +1418,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
factory.setDataPolicy(policy);
assertNotNull(cache);
region = cache.createRegion(REGION_NAME, factory.create());
- LogWriterSupport.getLogWriter().info("Client Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Client Region Created :" + region);
assertNotNull(region);
for (int i = 1; i <= 200; i++) {
region.put("execKey-" + i, new Integer(i));
@@ -1444,7 +1444,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
factory.setPoolName(p.getName());
assertNotNull(cache);
region = cache.createRegion(REGION_NAME, factory.create());
- LogWriterSupport.getLogWriter().info("Client Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Client Region Created :" + region);
assertNotNull(region);
}
@@ -1454,7 +1454,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
factory.setDataPolicy(policy);
assertNotNull(cache);
region = cache.createRegion(REGION_NAME, factory.create());
- LogWriterSupport.getLogWriter().info("Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Region Created :" + region);
assertNotNull(region);
CacheServer server = cache.addCacheServer();
@@ -1477,7 +1477,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
factory.setDataPolicy(policy);
assertNotNull(cache);
region = cache.createRegion(REGION_NAME, factory.create());
- LogWriterSupport.getLogWriter().info("Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Region Created :" + region);
assertNotNull(region);
}
@@ -1497,7 +1497,7 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
ds.disconnect();
ds = getSystem(props);
cache = CacheFactory.create(ds);
- LogWriterSupport.getLogWriter().info("Created Cache on peer");
+ LogWriterUtils.getLogWriter().info("Created Cache on peer");
assertNotNull(cache);
FunctionService.registerFunction(function);
}
@@ -1542,11 +1542,11 @@ public class DistributedRegionFunctionExecutionDUnitTest extends
};
Wait.waitForCriterion(wc, 2000, 500, false);
Collection bridgeServers = cache.getCacheServers();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Start Server Bridge Servers list : " + bridgeServers.size());
Iterator bridgeIterator = bridgeServers.iterator();
CacheServer bridgeServer = (CacheServer)bridgeIterator.next();
- LogWriterSupport.getLogWriter().info("start Server Bridge Server" + bridgeServer);
+ LogWriterUtils.getLogWriter().info("start Server Bridge Server" + bridgeServer);
try {
bridgeServer.start();
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/FunctionServiceStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/FunctionServiceStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/FunctionServiceStatsDUnitTest.java
index f9c45e7..9edb736 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/FunctionServiceStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/FunctionServiceStatsDUnitTest.java
@@ -54,7 +54,7 @@ import com.gemstone.gemfire.internal.cache.functions.TestFunction;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -263,7 +263,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception : " + e.getMessage());
e.printStackTrace();
fail("Test failed after the put operation");
}
@@ -287,9 +287,9 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
.getFunctionExecutionsCompleted());
assertTrue(functionServiceStats.getResultsReceived() >= resultReceived_Aggregate);
- LogWriterSupport.getLogWriter().info("Calling FunctionStats for TEST_FUNCTION2 :");
+ LogWriterUtils.getLogWriter().info("Calling FunctionStats for TEST_FUNCTION2 :");
FunctionStats functionStats = FunctionStats.getFunctionStats(TestFunction.TEST_FUNCTION2, iDS);
- LogWriterSupport.getLogWriter().info("Called FunctionStats for TEST_FUNCTION2 :");
+ LogWriterUtils.getLogWriter().info("Called FunctionStats for TEST_FUNCTION2 :");
assertEquals(noOfExecutionCalls_TESTFUNCTION2, functionStats
.getFunctionExecutionCalls());
assertEquals(noOfExecutionsCompleted_TESTFUNCTION2, functionStats
@@ -380,14 +380,14 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
ds.disconnect();
ds = getSystem(props);
cache = CacheFactory.create(ds);
- LogWriterSupport.getLogWriter().info("Created Cache on Server");
+ LogWriterUtils.getLogWriter().info("Created Cache on Server");
assertNotNull(cache);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
assertNotNull(cache);
Region region = cache.createRegion(regionName, factory.create());
- LogWriterSupport.getLogWriter().info("Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Region Created :" + region);
assertNotNull(region);
for (int i = 1; i <= 200; i++) {
region.put("execKey-" + i, new Integer(i));
@@ -429,7 +429,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
ds.disconnect();
ds = getSystem(props);
cache = CacheFactory.create(ds);
- LogWriterSupport.getLogWriter().info("Created Cache on Client");
+ LogWriterUtils.getLogWriter().info("Created Cache on Client");
assertNotNull(cache);
@@ -454,7 +454,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
factory.setPoolName(p.getName());
assertNotNull(cache);
Region region = cache.createRegion(regionName, factory.create());
- LogWriterSupport.getLogWriter().info("Client Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Client Region Created :" + region);
assertNotNull(region);
for (int i = 1; i <= 200; i++) {
region.put("execKey-" + i, new Integer(i));
@@ -605,7 +605,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operation nn TRUE");
}
function = new TestFunction(true, TestFunction.TEST_FUNCTION5);
@@ -627,7 +627,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
}
catch (Exception ex) {
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Exception : ", ex);
+ LogWriterUtils.getLogWriter().info("Exception : ", ex);
fail("Test failed after the execute operationssssss");
}
return Boolean.TRUE;
@@ -975,7 +975,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.EMPTY);
Region region = getCache().createRegion(rName, factory.create());
- LogWriterSupport.getLogWriter().info("Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Region Created :" + region);
assertNotNull(region);
FunctionService.registerFunction(new TestFunction(true, TestFunction.TEST_FUNCTION2));
for (int i = 1; i <= 200; i++) {
@@ -993,7 +993,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
Region region = getCache().createRegion(rName, factory.create());
- LogWriterSupport.getLogWriter().info("Region Created :" + region);
+ LogWriterUtils.getLogWriter().info("Region Created :" + region);
assertNotNull(region);
FunctionService.registerFunction(new TestFunction(true, TestFunction.TEST_FUNCTION2));
for (int i = 1; i <= 200; i++) {
@@ -1147,7 +1147,7 @@ public class FunctionServiceStatsDUnitTest extends PRClientServerTestBase{
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception Occured : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : " + e.getMessage());
e.printStackTrace();
Assert.fail("Test failed", e);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalDataSetDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalDataSetDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalDataSetDUnitTest.java
index be26687..6d582ed 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalDataSetDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalDataSetDUnitTest.java
@@ -47,7 +47,7 @@ import com.gemstone.gemfire.internal.cache.functions.LocalDataSetFunction;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -143,7 +143,7 @@ public class LocalDataSetDUnitTest extends CacheTestCase {
Region localRegion = PartitionRegionHelper.getLocalDataForContext((RegionFunctionContext)context);
Iterator it = localRegion.keySet().iterator();
while (it.hasNext()) {
- LogWriterSupport.getLogWriter().info("LocalKeys:"+it.next());
+ LogWriterUtils.getLogWriter().info("LocalKeys:"+it.next());
}
context.getResultSender().lastResult(Boolean.TRUE);
}
@@ -304,7 +304,7 @@ public class LocalDataSetDUnitTest extends CacheTestCase {
if (partitionedRegionName.equals("CustomerPR")) {
customerPR = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(customerPR);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + customerPR);
@@ -312,7 +312,7 @@ public class LocalDataSetDUnitTest extends CacheTestCase {
if (partitionedRegionName.equals("OrderPR")) {
orderPR = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(orderPR);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + orderPR);
@@ -321,7 +321,7 @@ public class LocalDataSetDUnitTest extends CacheTestCase {
if (partitionedRegionName.equals("ShipmentPR")) {
shipmentPR = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(shipmentPR);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + shipmentPR);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalFunctionExecutionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalFunctionExecutionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalFunctionExecutionDUnitTest.java
index 9ffbb61..9d68f8f 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalFunctionExecutionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/LocalFunctionExecutionDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -116,7 +116,7 @@ public class LocalFunctionExecutionDUnitTest extends DistributedTestCase{
region = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + region);
}
@@ -129,7 +129,7 @@ public class LocalFunctionExecutionDUnitTest extends DistributedTestCase{
assertNotNull(cache);
region = cache.createRegion(distributedRegionName, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + distributedRegionName
+ " created Successfully :" + region);
}
[33/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
Cleanup
Project: http://git-wip-us.apache.org/repos/asf/incubator-geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-geode/commit/c05f6798
Tree: http://git-wip-us.apache.org/repos/asf/incubator-geode/tree/c05f6798
Diff: http://git-wip-us.apache.org/repos/asf/incubator-geode/diff/c05f6798
Branch: refs/heads/feature/GEODE-773-2
Commit: c05f6798d70b478a08df1819d6f5f7d1d3fc9ffe
Parents: b17027b
Author: Kirk Lund <kl...@pivotal.io>
Authored: Thu Feb 4 16:15:25 2016 -0800
Committer: Kirk Lund <kl...@pivotal.io>
Committed: Thu Feb 4 16:15:25 2016 -0800
----------------------------------------------------------------------
.../cache/CacheRegionClearStatsDUnitTest.java | 6 +-
.../cache/ClientServerTimeSyncDUnitTest.java | 20 +-
.../cache/ConnectionPoolAndLoaderDUnitTest.java | 10 +-
.../internal/AutoConnectionSourceDUnitTest.java | 40 +--
.../internal/LocatorLoadBalancingDUnitTest.java | 44 +--
.../cache/client/internal/LocatorTestBase.java | 10 +-
.../pooling/ConnectionManagerJUnitTest.java | 6 +-
.../management/MemoryThresholdsDUnitTest.java | 24 +-
.../MemoryThresholdsOffHeapDUnitTest.java | 14 +-
.../mapInterface/PutAllGlobalLockJUnitTest.java | 4 +-
.../PartitionRegionHelperDUnitTest.java | 12 +-
.../query/dunit/CompactRangeIndexDUnitTest.java | 4 +-
.../cache/query/dunit/HashIndexDUnitTest.java | 4 +-
.../cache/query/dunit/HelperTestCase.java | 4 +-
.../query/dunit/PdxStringQueryDUnitTest.java | 204 ++++++------
.../dunit/QueryDataInconsistencyDUnitTest.java | 18 +-
.../dunit/QueryIndexUsingXMLDUnitTest.java | 46 +--
.../QueryParamsAuthorizationDUnitTest.java | 4 +-
.../QueryUsingFunctionContextDUnitTest.java | 8 +-
.../query/dunit/QueryUsingPoolDUnitTest.java | 112 +++----
.../cache/query/dunit/RemoteQueryDUnitTest.java | 44 +--
...esourceManagerWithQueryMonitorDUnitTest.java | 15 +-
.../query/dunit/SelectStarQueryDUnitTest.java | 50 +--
.../IndexCreationDeadLockJUnitTest.java | 6 +-
.../functional/LikePredicateJUnitTest.java | 4 +-
.../internal/ExecutionContextJUnitTest.java | 4 +-
.../index/AsynchIndexMaintenanceJUnitTest.java | 4 +-
...rrentIndexInitOnOverflowRegionDUnitTest.java | 29 +-
...ndexOperationsOnOverflowRegionDUnitTest.java | 48 ++-
...pdateWithInplaceObjectModFalseDUnitTest.java | 30 +-
...ConcurrentIndexUpdateWithoutWLDUnitTest.java | 38 +--
.../index/CopyOnReadIndexDUnitTest.java | 8 +-
.../index/IndexCreationInternalsJUnitTest.java | 6 +-
.../index/IndexMaintainceJUnitTest.java | 4 +-
.../IndexTrackingQueryObserverDUnitTest.java | 4 +-
...itializeIndexEntryDestroyQueryDUnitTest.java | 18 +-
.../index/MultiIndexCreationDUnitTest.java | 14 +-
.../index/PutAllWithIndexPerfDUnitTest.java | 8 +-
.../PRBasicIndexCreationDUnitTest.java | 104 +++---
.../PRBasicIndexCreationDeadlockDUnitTest.java | 4 +-
.../PRBasicMultiIndexCreationDUnitTest.java | 86 ++---
.../partitioned/PRBasicQueryDUnitTest.java | 34 +-
.../PRBasicRemoveIndexDUnitTest.java | 8 +-
.../PRColocatedEquiJoinDUnitTest.java | 300 ++++++++---------
.../partitioned/PRInvalidQueryDUnitTest.java | 18 +-
.../partitioned/PRQueryCacheCloseDUnitTest.java | 68 ++--
.../PRQueryCacheClosedJUnitTest.java | 6 +-
.../query/partitioned/PRQueryDUnitHelper.java | 144 ++++-----
.../query/partitioned/PRQueryDUnitTest.java | 154 ++++-----
.../query/partitioned/PRQueryPerfDUnitTest.java | 10 +-
.../PRQueryRegionCloseDUnitTest.java | 34 +-
.../PRQueryRegionDestroyedDUnitTest.java | 36 +--
.../PRQueryRegionDestroyedJUnitTest.java | 6 +-
.../PRQueryRemoteNodeExceptionDUnitTest.java | 136 ++++----
.../snapshot/SnapshotByteArrayDUnitTest.java | 8 +-
.../snapshot/SnapshotPerformanceDUnitTest.java | 18 +-
.../gemfire/cache30/Bug35214DUnitTest.java | 8 +-
.../gemfire/cache30/Bug38741DUnitTest.java | 4 +-
.../gemfire/cache30/CacheMapTxnDUnitTest.java | 7 +-
...cheRegionsReliablityStatsCheckDUnitTest.java | 4 +-
.../gemstone/gemfire/cache30/CacheTestCase.java | 4 +-
.../gemfire/cache30/CacheXml30DUnitTest.java | 4 +-
.../gemfire/cache30/CacheXml57DUnitTest.java | 4 +-
.../cache30/ClearMultiVmCallBkDUnitTest.java | 14 +-
.../gemfire/cache30/ClearMultiVmDUnitTest.java | 9 +-
.../cache30/ClientMembershipDUnitTest.java | 132 ++++----
.../ClientRegisterInterestDUnitTest.java | 42 +--
.../cache30/ClientServerCCEDUnitTest.java | 44 +--
.../gemfire/cache30/ClientServerTestCase.java | 4 +-
.../ConcurrentLeaveDuringGIIDUnitTest.java | 4 +-
.../gemfire/cache30/DiskRegionDUnitTest.java | 16 +-
.../cache30/DistAckMapMethodsDUnitTest.java | 6 +-
.../cache30/DistributedAckRegionDUnitTest.java | 4 +-
.../DistributedNoAckRegionCCEDUnitTest.java | 10 +-
.../DistributedNoAckRegionDUnitTest.java | 32 +-
.../gemfire/cache30/DynamicRegionDUnitTest.java | 16 +-
.../gemfire/cache30/GlobalLockingDUnitTest.java | 8 +-
.../gemfire/cache30/GlobalRegionDUnitTest.java | 14 +-
.../cache30/LRUEvictionControllerDUnitTest.java | 4 +-
.../gemfire/cache30/MultiVMRegionTestCase.java | 134 ++++----
.../PRBucketSynchronizationDUnitTest.java | 20 +-
.../cache30/PartitionedRegionDUnitTest.java | 2 +-
.../cache30/PutAllCallBkRemoteVMDUnitTest.java | 24 +-
.../cache30/PutAllCallBkSingleVMDUnitTest.java | 20 +-
.../cache30/RRSynchronizationDUnitTest.java | 20 +-
.../gemfire/cache30/ReconnectDUnitTest.java | 106 +++---
.../cache30/RegionExpirationDUnitTest.java | 10 +-
.../RegionMembershipListenerDUnitTest.java | 16 +-
.../cache30/RegionReliabilityTestCase.java | 5 +-
.../gemfire/cache30/RegionTestCase.java | 10 +-
.../gemfire/cache30/RequiredRolesDUnitTest.java | 14 +-
.../cache30/RolePerformanceDUnitTest.java | 6 +-
.../gemfire/cache30/SearchAndLoadDUnitTest.java | 12 +-
.../gemfire/cache30/SlowRecDUnitTest.java | 102 +++---
.../gemfire/cache30/TXDistributedDUnitTest.java | 4 +-
.../gemfire/cache30/TXOrderDUnitTest.java | 6 +-
.../distributed/DistributedSystemDUnitTest.java | 14 +-
.../gemfire/distributed/LocatorDUnitTest.java | 132 ++++----
.../distributed/SystemAdminDUnitTest.java | 8 +-
.../ConsoleDistributionManagerDUnitTest.java | 12 +-
.../internal/DistributionManagerDUnitTest.java | 14 +-
.../GemFireDeadlockDetectorDUnitTest.java | 6 +-
.../internal/locks/CollaborationJUnitTest.java | 18 +-
.../gemfire/disttx/DistTXDebugDUnitTest.java | 110 +++----
.../disttx/DistTXPersistentDebugDUnitTest.java | 10 +-
.../disttx/DistributedTransactionDUnitTest.java | 12 +-
.../ClassNotFoundExceptionDUnitTest.java | 4 +-
.../gemfire/internal/JSSESocketJUnitTest.java | 4 +-
.../internal/PdxDeleteFieldDUnitTest.java | 4 +-
.../gemfire/internal/PdxRenameDUnitTest.java | 4 +-
.../gemfire/internal/cache/BackupDUnitTest.java | 46 +--
.../internal/cache/Bug33359DUnitTest.java | 8 +-
.../internal/cache/Bug37241DUnitTest.java | 10 +-
.../internal/cache/Bug37377DUnitTest.java | 5 +-
.../internal/cache/Bug39079DUnitTest.java | 4 +-
.../internal/cache/Bug41091DUnitTest.java | 12 +-
.../internal/cache/Bug41957DUnitTest.java | 8 +-
.../internal/cache/Bug45164DUnitTest.java | 4 +-
.../internal/cache/Bug47667DUnitTest.java | 4 +-
.../internal/cache/ClearDAckDUnitTest.java | 24 +-
.../internal/cache/ClearGlobalDUnitTest.java | 12 +-
.../cache/ClientServerGetAllDUnitTest.java | 24 +-
...ServerInvalidAndDestroyedEntryDUnitTest.java | 24 +-
.../ClientServerTransactionCCEDUnitTest.java | 4 +-
.../cache/ClientServerTransactionDUnitTest.java | 54 ++--
.../cache/ConcurrentMapOpsDUnitTest.java | 4 +-
.../ConcurrentRegionOperationsJUnitTest.java | 18 +-
...rentRollingAndRegionOperationsJUnitTest.java | 4 +-
.../cache/ConnectDisconnectDUnitTest.java | 10 +-
.../cache/DeltaPropagationDUnitTest.java | 2 +-
.../cache/DeltaPropagationStatsDUnitTest.java | 10 +-
.../internal/cache/DeltaSizingDUnitTest.java | 6 +-
.../cache/DiskRegionClearJUnitTest.java | 4 +-
.../internal/cache/DiskRegionJUnitTest.java | 38 +--
.../cache/DistributedCacheTestCase.java | 12 +-
.../internal/cache/EventTrackerDUnitTest.java | 8 +-
.../internal/cache/EvictionDUnitTest.java | 10 +-
.../cache/EvictionObjectSizerDUnitTest.java | 18 +-
.../internal/cache/EvictionStatsDUnitTest.java | 18 +-
.../internal/cache/EvictionTestBase.java | 20 +-
.../cache/FixedPRSinglehopDUnitTest.java | 14 +-
.../internal/cache/GIIDeltaDUnitTest.java | 22 +-
.../internal/cache/GridAdvisorDUnitTest.java | 26 +-
.../internal/cache/HABug36773DUnitTest.java | 10 +-
.../HAOverflowMemObjectSizerDUnitTest.java | 6 +-
.../cache/IncrementalBackupDUnitTest.java | 6 +-
.../internal/cache/MapClearGIIDUnitTest.java | 14 +-
.../internal/cache/MapInterface2JUnitTest.java | 6 +-
.../cache/NetSearchMessagingDUnitTest.java | 10 +-
.../cache/OffHeapEvictionDUnitTest.java | 10 +-
.../cache/OffHeapEvictionStatsDUnitTest.java | 10 +-
.../gemfire/internal/cache/OplogJUnitTest.java | 10 +-
.../cache/PartitionedRegionAPIDUnitTest.java | 24 +-
...gionBucketCreationDistributionDUnitTest.java | 88 ++---
.../PartitionedRegionCacheCloseDUnitTest.java | 10 +-
.../PartitionedRegionCreationDUnitTest.java | 52 +--
.../cache/PartitionedRegionDUnitTestCase.java | 4 +-
.../PartitionedRegionDestroyDUnitTest.java | 18 +-
.../cache/PartitionedRegionHADUnitTest.java | 18 +-
...onedRegionHAFailureAndRecoveryDUnitTest.java | 42 +--
...artitionedRegionLocalMaxMemoryDUnitTest.java | 6 +-
.../PartitionedRegionMultipleDUnitTest.java | 58 ++--
.../cache/PartitionedRegionPRIDDUnitTest.java | 18 +-
.../cache/PartitionedRegionQueryDUnitTest.java | 12 +-
...tionedRegionSerializableObjectJUnitTest.java | 4 +-
.../PartitionedRegionSingleHopDUnitTest.java | 78 ++---
...RegionSingleHopWithServerGroupDUnitTest.java | 104 +++---
.../cache/PartitionedRegionSizeDUnitTest.java | 18 +-
.../PartitionedRegionTestUtilsDUnitTest.java | 10 +-
.../PartitionedRegionWithSameNameDUnitTest.java | 44 +--
.../internal/cache/PutAllDAckDUnitTest.java | 6 +-
.../internal/cache/PutAllGlobalDUnitTest.java | 28 +-
.../cache/RemoteTransactionDUnitTest.java | 14 +-
.../internal/cache/RemoveAllDAckDUnitTest.java | 6 +-
.../internal/cache/RemoveDAckDUnitTest.java | 4 +-
.../internal/cache/RemoveGlobalDUnitTest.java | 10 +-
.../cache/SimpleDiskRegionJUnitTest.java | 12 +-
.../internal/cache/SingleHopStatsDUnitTest.java | 24 +-
.../internal/cache/SystemFailureDUnitTest.java | 20 +-
.../cache/TXReservationMgrJUnitTest.java | 4 +-
.../cache/TransactionsWithDeltaDUnitTest.java | 12 +-
.../control/RebalanceOperationDUnitTest.java | 12 +-
...ltiThreadedOplogPerJUnitPerformanceTest.java | 4 +-
.../cache/execute/Bug51193DUnitTest.java | 4 +-
.../ClientServerFunctionExecutionDUnitTest.java | 66 ++--
.../execute/ColocationFailoverDUnitTest.java | 10 +-
...ributedRegionFunctionExecutionDUnitTest.java | 24 +-
.../execute/FunctionServiceStatsDUnitTest.java | 26 +-
.../cache/execute/LocalDataSetDUnitTest.java | 10 +-
.../LocalFunctionExecutionDUnitTest.java | 6 +-
.../MemberFunctionExecutionDUnitTest.java | 22 +-
.../OnGroupsFunctionExecutionDUnitTest.java | 44 +--
...ntServerFunctionExecutionNoAckDUnitTest.java | 36 +--
...tServerRegionFunctionExecutionDUnitTest.java | 46 +--
...egionFunctionExecutionFailoverDUnitTest.java | 36 +--
...onFunctionExecutionNoSingleHopDUnitTest.java | 40 +--
...onExecutionSelectorNoSingleHopDUnitTest.java | 40 +--
...gionFunctionExecutionSingleHopDUnitTest.java | 40 +--
.../cache/execute/PRClientServerTestBase.java | 30 +-
.../cache/execute/PRColocationDUnitTest.java | 82 ++---
.../execute/PRCustomPartitioningDUnitTest.java | 6 +-
.../execute/PRFunctionExecutionDUnitTest.java | 24 +-
...ctionExecutionWithResultSenderDUnitTest.java | 4 +-
.../execute/PRPerformanceTestDUnitTest.java | 10 +-
.../cache/execute/PRTransactionDUnitTest.java | 30 +-
.../ha/BlockingHARQAddOperationJUnitTest.java | 8 +-
.../cache/ha/BlockingHARegionJUnitTest.java | 42 +--
.../cache/ha/Bug36853EventsExpiryDUnitTest.java | 18 +-
.../internal/cache/ha/Bug48571DUnitTest.java | 4 +-
.../cache/ha/EventIdOptimizationDUnitTest.java | 18 +-
.../internal/cache/ha/FailoverDUnitTest.java | 4 +-
.../internal/cache/ha/HABugInPutDUnitTest.java | 6 +-
.../internal/cache/ha/HAClearDUnitTest.java | 18 +-
.../cache/ha/HAConflationDUnitTest.java | 4 +-
.../internal/cache/ha/HADuplicateDUnitTest.java | 4 +-
.../cache/ha/HAEventIdPropagationDUnitTest.java | 16 +-
.../internal/cache/ha/HAGIIBugDUnitTest.java | 20 +-
.../internal/cache/ha/HAGIIDUnitTest.java | 4 +-
.../cache/ha/HARQAddOperationJUnitTest.java | 24 +-
.../cache/ha/HARQueueNewImplDUnitTest.java | 78 ++---
.../cache/ha/HARegionQueueDUnitTest.java | 8 +-
.../cache/ha/HARegionQueueJUnitTest.java | 26 +-
.../cache/ha/HASlowReceiverDUnitTest.java | 4 +-
.../ha/OperationsPropagationDUnitTest.java | 4 +-
.../internal/cache/ha/PutAllDUnitTest.java | 12 +-
.../internal/cache/ha/StatsBugDUnitTest.java | 20 +-
.../cache/locks/TXLockServiceDUnitTest.java | 42 +--
.../cache/partitioned/Bug39356DUnitTest.java | 2 +-
.../cache/partitioned/Bug43684DUnitTest.java | 4 +-
.../cache/partitioned/Bug51400DUnitTest.java | 8 +-
.../partitioned/PersistPRKRFDUnitTest.java | 9 +-
.../PersistentPartitionedRegionDUnitTest.java | 12 +-
.../PersistentPartitionedRegionTestBase.java | 8 +-
...rtitionedRegionWithTransactionDUnitTest.java | 8 +-
.../fixed/FixedPartitioningTestBase.java | 50 +--
.../PersistentRVVRecoveryDUnitTest.java | 8 +-
.../PersistentRecoveryOrderDUnitTest.java | 92 +++---
...mpatibilityHigherVersionClientDUnitTest.java | 4 +-
.../cache/tier/sockets/Bug36269DUnitTest.java | 6 +-
.../cache/tier/sockets/Bug36457DUnitTest.java | 10 +-
.../cache/tier/sockets/Bug36805DUnitTest.java | 6 +-
.../cache/tier/sockets/Bug36829DUnitTest.java | 4 +-
.../cache/tier/sockets/Bug36995DUnitTest.java | 8 +-
.../cache/tier/sockets/Bug37210DUnitTest.java | 12 +-
.../cache/tier/sockets/Bug37805DUnitTest.java | 4 +-
.../cache/tier/sockets/CacheServerTestUtil.java | 18 +-
.../CacheServerTransactionsDUnitTest.java | 48 +--
.../tier/sockets/ClearPropagationDUnitTest.java | 14 +-
.../tier/sockets/ClientConflationDUnitTest.java | 10 +-
.../sockets/ClientInterestNotifyDUnitTest.java | 14 +-
.../tier/sockets/ClientServerMiscDUnitTest.java | 34 +-
.../cache/tier/sockets/ConflationDUnitTest.java | 32 +-
.../DataSerializerPropogationDUnitTest.java | 43 +--
.../DestroyEntryPropagationDUnitTest.java | 16 +-
.../sockets/DurableClientBug39997DUnitTest.java | 4 +-
.../DurableClientQueueSizeDUnitTest.java | 4 +-
.../DurableClientReconnectAutoDUnitTest.java | 6 +-
.../DurableClientReconnectDUnitTest.java | 56 ++--
.../sockets/DurableClientStatsDUnitTest.java | 10 +-
.../sockets/DurableRegistrationDUnitTest.java | 22 +-
.../sockets/DurableResponseMatrixDUnitTest.java | 4 +-
.../sockets/EventIDVerificationDUnitTest.java | 4 +-
.../ForceInvalidateEvictionDUnitTest.java | 4 +-
.../cache/tier/sockets/HABug36738DUnitTest.java | 8 +-
.../tier/sockets/HAInterestPart1DUnitTest.java | 18 +-
.../tier/sockets/HAInterestPart2DUnitTest.java | 18 +-
.../cache/tier/sockets/HAInterestTestCase.java | 5 +-
.../sockets/HAStartupAndFailoverDUnitTest.java | 20 +-
.../InstantiatorPropagationDUnitTest.java | 36 +--
.../tier/sockets/InterestListDUnitTest.java | 46 +--
.../sockets/InterestListEndpointDUnitTest.java | 4 +-
.../sockets/InterestListFailoverDUnitTest.java | 6 +-
.../sockets/InterestListRecoveryDUnitTest.java | 8 +-
.../sockets/InterestRegrListenerDUnitTest.java | 48 +--
.../sockets/InterestResultPolicyDUnitTest.java | 10 +-
.../sockets/RedundancyLevelPart1DUnitTest.java | 26 +-
.../sockets/RedundancyLevelPart2DUnitTest.java | 24 +-
.../sockets/RedundancyLevelPart3DUnitTest.java | 8 +-
.../tier/sockets/RedundancyLevelTestBase.java | 4 +-
.../tier/sockets/RegionCloseDUnitTest.java | 4 +-
...erInterestBeforeRegionCreationDUnitTest.java | 6 +-
.../sockets/RegisterInterestKeysDUnitTest.java | 10 +-
.../sockets/ReliableMessagingDUnitTest.java | 22 +-
.../sockets/UnregisterInterestDUnitTest.java | 4 +-
.../sockets/UpdatePropagationDUnitTest.java | 10 +-
...UpdatesFromNonInterestEndPointDUnitTest.java | 4 +-
.../versions/RegionVersionVectorJUnitTest.java | 4 +-
.../cache/wan/AsyncEventQueueTestBase.java | 20 +-
.../asyncqueue/AsyncEventListenerDUnitTest.java | 42 +--
.../CompressionCacheConfigDUnitTest.java | 8 +-
.../CompressionRegionConfigDUnitTest.java | 14 +-
.../internal/jta/dunit/ExceptionsDUnitTest.java | 12 +-
.../jta/dunit/IdleTimeOutDUnitTest.java | 50 +--
.../jta/dunit/LoginTimeOutDUnitTest.java | 7 +-
.../jta/dunit/MaxPoolSizeDUnitTest.java | 36 +--
.../jta/dunit/TransactionTimeOutDUnitTest.java | 14 +-
.../dunit/TxnManagerMultiThreadDUnitTest.java | 70 ++--
.../internal/jta/dunit/TxnTimeOutDUnitTest.java | 26 +-
.../logging/MergeLogFilesJUnitTest.java | 4 +-
.../management/CacheManagementDUnitTest.java | 14 +-
.../management/ClientHealthStatsDUnitTest.java | 14 +-
.../management/DLockManagementDUnitTest.java | 12 +-
.../management/DiskManagementDUnitTest.java | 28 +-
.../management/DistributedSystemDUnitTest.java | 20 +-
.../management/LocatorManagementDUnitTest.java | 12 +-
.../gemstone/gemfire/management/MBeanUtil.java | 4 +-
.../gemfire/management/ManagementTestBase.java | 12 +-
.../MemberMBeanAttributesDUnitTest.java | 4 +-
.../gemfire/management/QueryDataDUnitTest.java | 18 +-
.../management/RegionManagementDUnitTest.java | 44 +--
...ersalMembershipListenerAdapterDUnitTest.java | 140 ++++----
.../internal/cli/CliUtilDUnitTest.java | 26 +-
.../cli/commands/CliCommandTestBase.java | 10 +-
.../cli/commands/ConfigCommandsDUnitTest.java | 26 +-
...eateAlterDestroyRegionCommandsDUnitTest.java | 34 +-
.../commands/DiskStoreCommandsDUnitTest.java | 24 +-
.../cli/commands/FunctionCommandsDUnitTest.java | 52 +--
.../commands/GemfireDataCommandsDUnitTest.java | 168 +++++-----
...WithCacheLoaderDuringCacheMissDUnitTest.java | 4 +-
.../cli/commands/IndexCommandsDUnitTest.java | 6 +-
...stAndDescribeDiskStoreCommandsDUnitTest.java | 8 +-
.../ListAndDescribeRegionDUnitTest.java | 30 +-
.../cli/commands/ListIndexCommandDUnitTest.java | 16 +-
.../cli/commands/MemberCommandsDUnitTest.java | 14 +-
.../MiscellaneousCommandsDUnitTest.java | 22 +-
...laneousCommandsExportLogsPart1DUnitTest.java | 10 +-
...laneousCommandsExportLogsPart2DUnitTest.java | 10 +-
...laneousCommandsExportLogsPart3DUnitTest.java | 10 +-
...laneousCommandsExportLogsPart4DUnitTest.java | 10 +-
.../cli/commands/QueueCommandsDUnitTest.java | 4 +-
.../SharedConfigurationCommandsDUnitTest.java | 14 +-
.../cli/commands/ShellCommandsDUnitTest.java | 16 +-
.../cli/commands/ShowDeadlockDUnitTest.java | 6 +-
.../cli/commands/ShowMetricsDUnitTest.java | 24 +-
.../cli/commands/ShowStackTraceDUnitTest.java | 22 +-
.../internal/pulse/TestClientIdsDUnitTest.java | 12 +-
.../internal/pulse/TestFunctionsDUnitTest.java | 4 +-
.../internal/pulse/TestHeapDUnitTest.java | 4 +-
.../internal/pulse/TestLocatorsDUnitTest.java | 4 +-
.../pulse/TestSubscriptionsDUnitTest.java | 10 +-
.../ClientsWithVersioningRetryDUnitTest.java | 16 +-
.../pdx/JSONPdxClientServerDUnitTest.java | 6 +-
.../gemfire/pdx/PdxClientServerDUnitTest.java | 12 +-
.../gemfire/pdx/PdxTypeExportDUnitTest.java | 4 +-
.../gemfire/pdx/VersionClassLoader.java | 10 +-
.../gemfire/redis/RedisDistDUnitTest.java | 8 +-
.../security/ClientAuthenticationDUnitTest.java | 90 +++---
.../security/ClientAuthorizationDUnitTest.java | 56 ++--
.../security/ClientAuthorizationTestBase.java | 22 +-
.../security/ClientMultiUserAuthzDUnitTest.java | 36 +--
.../DeltaClientAuthorizationDUnitTest.java | 36 +--
.../DeltaClientPostAuthorizationDUnitTest.java | 18 +-
.../security/P2PAuthenticationDUnitTest.java | 32 +-
.../gemfire/security/SecurityTestUtil.java | 188 +++++------
.../com/gemstone/gemfire/test/dunit/Assert.java | 32 +-
.../gemfire/test/dunit/AsyncInvocation.java | 17 +-
.../gemstone/gemfire/test/dunit/DUnitEnv.java | 1 -
.../gemfire/test/dunit/DebuggerSupport.java | 33 --
.../gemfire/test/dunit/DebuggerUtils.java | 52 +++
.../test/dunit/DistributedSystemSupport.java | 82 -----
.../gemfire/test/dunit/DistributedTestCase.java | 15 +-
.../test/dunit/DistributedTestSupport.java | 106 ------
.../test/dunit/DistributedTestUtils.java | 169 ++++++++++
.../com/gemstone/gemfire/test/dunit/Host.java | 5 +-
.../gemfire/test/dunit/IgnoredException.java | 178 +++++-----
.../com/gemstone/gemfire/test/dunit/Invoke.java | 102 +++---
.../gemfire/test/dunit/LogWriterSupport.java | 72 -----
.../gemfire/test/dunit/LogWriterUtils.java | 95 ++++++
.../gemfire/test/dunit/NetworkSupport.java | 48 ---
.../gemfire/test/dunit/NetworkUtils.java | 69 ++++
.../gemfire/test/dunit/RMIException.java | 2 +-
.../gemfire/test/dunit/RepeatableRunnable.java | 4 +-
.../test/dunit/SerializableCallableIF.java | 4 +-
.../test/dunit/SerializableRunnable.java | 3 +-
.../test/dunit/SerializableRunnableIF.java | 4 +-
.../test/dunit/StoppableWaitCriterion.java | 12 +-
.../gemfire/test/dunit/ThreadUtils.java | 155 +++++++++
.../gemstone/gemfire/test/dunit/Threads.java | 138 --------
.../com/gemstone/gemfire/test/dunit/VM.java | 50 +--
.../com/gemstone/gemfire/test/dunit/Wait.java | 246 +++++++-------
.../gemfire/test/dunit/WaitCriterion.java | 13 +-
.../cache/query/cq/dunit/CqDataDUnitTest.java | 62 ++--
.../cq/dunit/CqDataUsingPoolDUnitTest.java | 54 ++--
.../cache/query/cq/dunit/CqPerfDUnitTest.java | 44 +--
.../cq/dunit/CqPerfUsingPoolDUnitTest.java | 42 +--
.../cache/query/cq/dunit/CqQueryDUnitTest.java | 206 ++++++------
.../dunit/CqQueryOptimizedExecuteDUnitTest.java | 14 +-
.../cq/dunit/CqQueryUsingPoolDUnitTest.java | 166 +++++-----
.../cq/dunit/CqResultSetUsingPoolDUnitTest.java | 56 ++--
...ltSetUsingPoolOptimizedExecuteDUnitTest.java | 14 +-
.../cache/query/cq/dunit/CqStateDUnitTest.java | 9 +-
.../cache/query/cq/dunit/CqStatsDUnitTest.java | 12 +-
.../cq/dunit/CqStatsUsingPoolDUnitTest.java | 12 +-
.../PartitionedRegionCqQueryDUnitTest.java | 58 ++--
...dRegionCqQueryOptimizedExecuteDUnitTest.java | 12 +-
.../query/cq/dunit/PrCqUsingPoolDUnitTest.java | 56 ++--
.../cache/query/dunit/PdxQueryCQDUnitTest.java | 32 +-
.../cache/query/dunit/PdxQueryCQTestBase.java | 18 +-
.../dunit/QueryIndexUpdateRIDUnitTest.java | 46 +--
.../query/dunit/QueryMonitorDUnitTest.java | 38 +--
.../cache/snapshot/ClientSnapshotDUnitTest.java | 8 +-
.../cache/PRDeltaPropagationDUnitTest.java | 4 +-
.../internal/cache/PutAllCSDUnitTest.java | 324 +++++++++----------
.../cache/RemoteCQTransactionDUnitTest.java | 4 +-
.../internal/cache/ha/CQListGIIDUnitTest.java | 30 +-
.../cache/ha/HADispatcherDUnitTest.java | 12 +-
.../sockets/ClientToServerDeltaDUnitTest.java | 20 +-
.../DeltaPropagationWithCQDUnitTest.java | 10 +-
...ToRegionRelationCQRegistrationDUnitTest.java | 20 +-
.../sockets/DurableClientSimpleDUnitTest.java | 54 ++--
.../tier/sockets/DurableClientTestCase.java | 56 ++--
.../CacheServerManagementDUnitTest.java | 28 +-
.../cli/commands/ClientCommandsDUnitTest.java | 14 +-
.../DurableClientCommandsDUnitTest.java | 12 +-
.../internal/pulse/TestCQDUnitTest.java | 10 +-
.../internal/pulse/TestClientsDUnitTest.java | 8 +-
.../internal/pulse/TestServerDUnitTest.java | 4 +-
.../security/ClientAuthzObjectModDUnitTest.java | 10 +-
.../ClientCQPostAuthorizationDUnitTest.java | 28 +-
.../ClientPostAuthorizationDUnitTest.java | 16 +-
.../gemfire/security/MultiuserAPIDUnitTest.java | 12 +-
.../MultiuserDurableCQAuthzDUnitTest.java | 20 +-
.../internal/cache/UpdateVersionDUnitTest.java | 10 +-
.../gemfire/internal/cache/wan/WANTestBase.java | 68 ++--
...oncurrentParallelGatewaySenderDUnitTest.java | 4 +-
...allelGatewaySenderOperation_1_DUnitTest.java | 36 +--
...allelGatewaySenderOperation_2_DUnitTest.java | 12 +-
.../ConcurrentWANPropogation_1_DUnitTest.java | 4 +-
.../cache/wan/disttx/DistTXWANDUnitTest.java | 4 +-
.../CommonParallelGatewaySenderDUnitTest.java | 20 +-
...wWANConcurrencyCheckForDestroyDUnitTest.java | 14 +-
...dRegion_ParallelWANPersistenceDUnitTest.java | 50 +--
...dRegion_ParallelWANPropogationDUnitTest.java | 12 +-
...downAllPersistentGatewaySenderDUnitTest.java | 4 +-
.../wan/misc/WANLocatorServerDUnitTest.java | 4 +-
.../wan/misc/WanAutoDiscoveryDUnitTest.java | 4 +-
...arallelGatewaySenderOperationsDUnitTest.java | 28 +-
...llelGatewaySenderQueueOverflowDUnitTest.java | 18 +-
...ersistenceEnabledGatewaySenderDUnitTest.java | 180 +++++------
.../ParallelWANPropagationDUnitTest.java | 4 +-
.../SerialGatewaySenderOperationsDUnitTest.java | 14 +-
...ersistenceEnabledGatewaySenderDUnitTest.java | 62 ++--
.../serial/SerialWANPropogationDUnitTest.java | 40 +--
.../management/WANManagementDUnitTest.java | 4 +-
.../ClusterConfigurationDUnitTest.java | 4 +-
.../pulse/TestRemoteClusterDUnitTest.java | 4 +-
446 files changed, 6265 insertions(+), 6123 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/CacheRegionClearStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/CacheRegionClearStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/CacheRegionClearStatsDUnitTest.java
index 8908abc..17a3dc8 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/CacheRegionClearStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/CacheRegionClearStatsDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
* verifies the count of clear operation
@@ -153,7 +153,7 @@ public class CacheRegionClearStatsDUnitTest extends DistributedTestCase {
client1.invoke(CacheRegionClearStatsDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ NetworkUtils.getServerHostName(server1.getHost()), port1 });
client1.invoke(CacheRegionClearStatsDUnitTest.class, "put");
try{
@@ -178,7 +178,7 @@ public class CacheRegionClearStatsDUnitTest extends DistributedTestCase {
client1.invoke(CacheRegionClearStatsDUnitTest.class,
"createClientCacheDisk", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ NetworkUtils.getServerHostName(server1.getHost()), port1 });
client1.invoke(CacheRegionClearStatsDUnitTest.class, "put");
try{
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ClientServerTimeSyncDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ClientServerTimeSyncDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ClientServerTimeSyncDUnitTest.java
index 7e21875..8166318 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ClientServerTimeSyncDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ClientServerTimeSyncDUnitTest.java
@@ -31,8 +31,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -63,7 +63,7 @@ public class ClientServerTimeSyncDUnitTest extends CacheTestCase {
public Object call() {
Cache cache = getCache();
cache.createRegionFactory(RegionShortcut.REPLICATE).create(regionName);
- LogWriterSupport.getLogWriter().info("Done creating region, now creating CacheServer");
+ LogWriterUtils.getLogWriter().info("Done creating region, now creating CacheServer");
CacheServer server = null;
try {
server = cache.addCacheServer();
@@ -76,12 +76,12 @@ public class ClientServerTimeSyncDUnitTest extends CacheTestCase {
// now set an artificial time offset for the test
system.getClock().setCacheTimeOffset(null, TEST_OFFSET, true);
- LogWriterSupport.getLogWriter().info("Done creating and starting CacheServer on port " + server.getPort());
+ LogWriterUtils.getLogWriter().info("Done creating and starting CacheServer on port " + server.getPort());
return server.getPort();
}
});
- final String hostName = NetworkSupport.getServerHostName(vm0.getHost());
+ final String hostName = NetworkUtils.getServerHostName(vm0.getHost());
// Start client with proxy region and register interest
@@ -103,7 +103,7 @@ public class ClientServerTimeSyncDUnitTest extends CacheTestCase {
WaitCriterion wc = new WaitCriterion() {
public boolean done() {
long clientTimeOffset = clock.getCacheTimeOffset();
- LogWriterSupport.getLogWriter().info("Client node's new time offset is: " + clientTimeOffset);
+ LogWriterUtils.getLogWriter().info("Client node's new time offset is: " + clientTimeOffset);
return clientTimeOffset >= TEST_OFFSET;
}
public String description() {
@@ -140,7 +140,7 @@ public class ClientServerTimeSyncDUnitTest extends CacheTestCase {
public Object call() {
Cache cache = getCache();
cache.createRegionFactory(RegionShortcut.REPLICATE).create(regionName);
- LogWriterSupport.getLogWriter().info("Done creating region, now creating CacheServer");
+ LogWriterUtils.getLogWriter().info("Done creating region, now creating CacheServer");
CacheServer server = null;
try {
server = cache.addCacheServer();
@@ -153,14 +153,14 @@ public class ClientServerTimeSyncDUnitTest extends CacheTestCase {
// now set an artificial time offset for the test
system.getClock().setCacheTimeOffset(null, -TEST_OFFSET, true);
- LogWriterSupport.getLogWriter().info("Done creating and starting CacheServer on port " + server.getPort());
+ LogWriterUtils.getLogWriter().info("Done creating and starting CacheServer on port " + server.getPort());
return server.getPort();
}
});
Wait.pause((int)TEST_OFFSET); // let cacheTimeMillis consume the time offset
- final String hostName = NetworkSupport.getServerHostName(vm0.getHost());
+ final String hostName = NetworkUtils.getServerHostName(vm0.getHost());
// Start client with proxy region and register interest
@@ -182,7 +182,7 @@ public class ClientServerTimeSyncDUnitTest extends CacheTestCase {
WaitCriterion wc = new WaitCriterion() {
public boolean done() {
long clientTimeOffset = clock.getCacheTimeOffset();
- LogWriterSupport.getLogWriter().info("Client node's new time offset is: " + clientTimeOffset);
+ LogWriterUtils.getLogWriter().info("Client node's new time offset is: " + clientTimeOffset);
if (clientTimeOffset >= 0) {
return false;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ConnectionPoolAndLoaderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ConnectionPoolAndLoaderDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ConnectionPoolAndLoaderDUnitTest.java
index ba6412a..ee382ab 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ConnectionPoolAndLoaderDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/ConnectionPoolAndLoaderDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.cache.util.CacheWriterAdapter;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -94,7 +94,7 @@ public class ConnectionPoolAndLoaderDUnitTest extends CacheTestCase {
public Object call() {
Cache cache = getCache();
PoolFactory factory = PoolManager.createFactory();
- factory.addServer(NetworkSupport.getServerHostName(host), serverPort);
+ factory.addServer(NetworkUtils.getServerHostName(host), serverPort);
factory.create("pool1");
AttributesFactory af = new AttributesFactory();
@@ -173,7 +173,7 @@ public class ConnectionPoolAndLoaderDUnitTest extends CacheTestCase {
public Object call() {
Cache cache = getCache();
PoolFactory factory = PoolManager.createFactory();
- factory.addServer(NetworkSupport.getServerHostName(host), serverPort);
+ factory.addServer(NetworkUtils.getServerHostName(host), serverPort);
factory.create("pool1");
AttributesFactory af = new AttributesFactory();
@@ -283,7 +283,7 @@ public class ConnectionPoolAndLoaderDUnitTest extends CacheTestCase {
Cache cache = getCache();
useLocator = false;
PoolFactory factory = PoolManager.createFactory();
- factory.addServer(NetworkSupport.getServerHostName(host), serverPort);
+ factory.addServer(NetworkUtils.getServerHostName(host), serverPort);
factory.create("pool1");
AttributesFactory af = new AttributesFactory();
af.setDataPolicy(DataPolicy.NORMAL);
@@ -305,7 +305,7 @@ public class ConnectionPoolAndLoaderDUnitTest extends CacheTestCase {
Cache cache = getCache();
useLocator = false;
PoolFactory factory = PoolManager.createFactory();
- factory.addServer(NetworkSupport.getServerHostName(host), serverPort);
+ factory.addServer(NetworkUtils.getServerHostName(host), serverPort);
factory.create("pool1");
AttributesFactory af = new AttributesFactory();
af.setDataPolicy(DataPolicy.NORMAL);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/AutoConnectionSourceDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/AutoConnectionSourceDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/AutoConnectionSourceDUnitTest.java
index 374950d..b0f3b59 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/AutoConnectionSourceDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/AutoConnectionSourceDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.management.membership.ClientMembershipEvent;
import com.gemstone.gemfire.management.membership.ClientMembershipListenerAdapter;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -74,11 +74,11 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
startLocatorInVM(vm0, locatorPort, "");
- String locators = NetworkSupport.getServerHostName(vm0.getHost())+ "[" + locatorPort + "]";
+ String locators = NetworkUtils.getServerHostName(vm0.getHost())+ "[" + locatorPort + "]";
startBridgeServerInVM(vm1, null, locators);
- startBridgeClientInVM(vm2, null, NetworkSupport.getServerHostName(vm0.getHost()), locatorPort);
+ startBridgeClientInVM(vm2, null, NetworkUtils.getServerHostName(vm0.getHost()), locatorPort);
putAndWaitForSuccess(vm2, REGION_NAME, "key", "value");
@@ -91,7 +91,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
VM vm0 = host.getVM(0);
try {
- startBridgeClientInVM(vm0, null, NetworkSupport.getServerHostName(vm0.getHost()), AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET));
+ startBridgeClientInVM(vm0, null, NetworkUtils.getServerHostName(vm0.getHost()), AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET));
putInVM(vm0, "key", "value");
fail("Client cache should not have been able to start");
} catch(Exception e) {
@@ -107,7 +107,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
startLocatorInVM(vm0, locatorPort, "");
try {
- startBridgeClientInVM(vm1, null, NetworkSupport.getServerHostName(vm0.getHost()), locatorPort);
+ startBridgeClientInVM(vm1, null, NetworkUtils.getServerHostName(vm0.getHost()), locatorPort);
putInVM(vm0, "key", "value");
fail("Client cache should not have been able to start");
} catch(Exception e) {
@@ -125,11 +125,11 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
startLocatorInVM(vm0, locatorPort, "");
- String locators = NetworkSupport.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
+ String locators = NetworkUtils.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
startBridgeServerInVM(vm1, null, locators);
- startBridgeClientInVM(vm2, null, NetworkSupport.getServerHostName(vm0.getHost()), locatorPort);
+ startBridgeClientInVM(vm2, null, NetworkUtils.getServerHostName(vm0.getHost()), locatorPort);
putAndWaitForSuccess(vm2, REGION_NAME, "key", "value");
@@ -144,7 +144,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
public void testDynamicallyFindLocators() throws Exception {
final Host host = Host.getHost(0);
- final String hostName = NetworkSupport.getServerHostName(host);
+ final String hostName = NetworkUtils.getServerHostName(host);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
@@ -159,7 +159,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
startLocatorInVM(vm0, locatorPort0, locators);
startLocatorInVM(vm1, locatorPort1, locators);
- startBridgeClientInVM(vm2, null, NetworkSupport.getServerHostName(vm0.getHost()), locatorPort0);
+ startBridgeClientInVM(vm2, null, NetworkUtils.getServerHostName(vm0.getHost()), locatorPort0);
InetSocketAddress locatorToWaitFor= new InetSocketAddress(hostName, locatorPort1);
waitForLocatorDiscovery(vm2, locatorToWaitFor);
@@ -190,11 +190,11 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- String locators = NetworkSupport.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
+ String locators = NetworkUtils.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
startBridgeServerWithEmbeddedLocator(vm0, null, locators, new String[] {REGION_NAME}, CacheServer.DEFAULT_LOAD_PROBE);
- startBridgeClientInVM(vm2, null, NetworkSupport.getServerHostName(vm0.getHost()), locatorPort);
+ startBridgeClientInVM(vm2, null, NetworkUtils.getServerHostName(vm0.getHost()), locatorPort);
putAndWaitForSuccess(vm2, REGION_NAME, "key", "value");
@@ -226,13 +226,13 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
startLocatorInVM(vm0, locatorPort, "");
- String locators = NetworkSupport.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
+ String locators = NetworkUtils.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
startBridgeServerInVM(vm1, new String[] {"group1", "group2"} , locators, new String[] {"A", "B"});
startBridgeServerInVM(vm2, new String[] {"group2", "group3"}, locators, new String[] {"B", "C"});
- startBridgeClientInVM(vm3, "group1", NetworkSupport.getServerHostName(vm0.getHost()), locatorPort, new String [] {"A", "B", "C"});
+ startBridgeClientInVM(vm3, "group1", NetworkUtils.getServerHostName(vm0.getHost()), locatorPort, new String [] {"A", "B", "C"});
putAndWaitForSuccess(vm3, "A", "key", "value");
Assert.assertEquals("value", getInVM(vm1, "A", "key"));
try {
@@ -242,7 +242,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
stopBridgeMemberVM(vm3);
- startBridgeClientInVM(vm3, "group3", NetworkSupport.getServerHostName(vm0.getHost()), locatorPort, new String [] {"A", "B", "C"});
+ startBridgeClientInVM(vm3, "group3", NetworkUtils.getServerHostName(vm0.getHost()), locatorPort, new String [] {"A", "B", "C"});
try {
putInVM(vm3, "A", "key3", "value");
fail("Should not have been able to find Region A on the server");
@@ -252,7 +252,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
stopBridgeMemberVM(vm3);
- startBridgeClientInVM(vm3, "group2", NetworkSupport.getServerHostName(vm0.getHost()), locatorPort, new String [] {"A", "B", "C"});
+ startBridgeClientInVM(vm3, "group2", NetworkUtils.getServerHostName(vm0.getHost()), locatorPort, new String [] {"A", "B", "C"});
putInVM(vm3, "B", "key5", "value");
Assert.assertEquals("value", getInVM(vm1, "B", "key5"));
Assert.assertEquals("value", getInVM(vm2, "B", "key5"));
@@ -278,18 +278,18 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
startLocatorInVM(vm0, locatorPort, "");
- final String locators = NetworkSupport.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
+ final String locators = NetworkUtils.getServerHostName(vm0.getHost()) + "[" + locatorPort + "]";
final int serverPort1 =startBridgeServerInVM(vm1, new String[] {"group1"}, locators);
final int serverPort2 =addCacheServerInVM(vm1, new String[] {"group2"});
- startBridgeClientInVM(vm2, "group2", NetworkSupport.getServerHostName(vm0.getHost()), locatorPort);
+ startBridgeClientInVM(vm2, "group2", NetworkUtils.getServerHostName(vm0.getHost()), locatorPort);
checkEndpoints(vm2, new int[] {serverPort2});
stopBridgeMemberVM(vm2);
- startBridgeClientInVM(vm2, "group1", NetworkSupport.getServerHostName(vm0.getHost()), locatorPort);
+ startBridgeClientInVM(vm2, "group1", NetworkUtils.getServerHostName(vm0.getHost()), locatorPort);
checkEndpoints(vm2, new int[] {serverPort1});
}
@@ -302,7 +302,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
VM clientVM = host.getVM(3);
int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
startLocatorInVM(locatorVM, locatorPort, "");
- String locators = NetworkSupport.getServerHostName(locatorVM.getHost()) + "[" + locatorPort + "]";
+ String locators = NetworkUtils.getServerHostName(locatorVM.getHost()) + "[" + locatorPort + "]";
//start a bridge server with a listener
addBridgeListener(bridge1VM);
@@ -310,7 +310,7 @@ public class AutoConnectionSourceDUnitTest extends LocatorTestBase {
//start a bridge client with a listener
addBridgeListener(clientVM);
- startBridgeClientInVM(clientVM, null, NetworkSupport.getServerHostName(locatorVM.getHost()), locatorPort);
+ startBridgeClientInVM(clientVM, null, NetworkUtils.getServerHostName(locatorVM.getHost()), locatorPort);
// wait for client to connect
checkEndpoints(clientVM, new int[] {serverPort1});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorLoadBalancingDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorLoadBalancingDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorLoadBalancingDUnitTest.java
index 8ed8baf..3c6a980 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorLoadBalancingDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorLoadBalancingDUnitTest.java
@@ -48,8 +48,8 @@ import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.internal.logging.LocalLogWriter;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.SerializableRunnableIF;
import com.gemstone.gemfire.test.dunit.VM;
@@ -101,7 +101,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
int serverPort = startBridgeServerInVM(vm1, new String[] {"a", "b"}, locators);
ServerLoad expectedLoad = new ServerLoad(0f, 1 / 800.0f, 0f, 1f);
- ServerLocation expectedLocation = new ServerLocation(NetworkSupport.getServerHostName(vm0
+ ServerLocation expectedLocation = new ServerLocation(NetworkUtils.getServerHostName(vm0
.getHost()), serverPort);
Map expected = new HashMap();
expected.put(expectedLocation, expectedLoad);
@@ -110,7 +110,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
int serverPort2 = startBridgeServerInVM(vm2, new String[] {"a", "b"}, locators);
- ServerLocation expectedLocation2 = new ServerLocation(NetworkSupport.getServerHostName(vm0
+ ServerLocation expectedLocation2 = new ServerLocation(NetworkUtils.getServerHostName(vm0
.getHost()), serverPort2);
expected.put(expectedLocation2, expectedLoad);
@@ -133,18 +133,18 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
int serverPort = startBridgeServerInVM(vm1, new String[] {"a", "b"}, locators);
ServerLoad expectedLoad = new ServerLoad(2/800f, 1 / 800.0f, 0f, 1f);
- ServerLocation expectedLocation = new ServerLocation(NetworkSupport.getServerHostName(host), serverPort);
+ ServerLocation expectedLocation = new ServerLocation(NetworkUtils.getServerHostName(host), serverPort);
Map expected = new HashMap();
expected.put(expectedLocation, expectedLoad);
ClientConnectionResponse response;
response = (ClientConnectionResponse) TcpClient.requestToServer(InetAddress
- .getByName(NetworkSupport.getServerHostName(host)), locatorPort,
+ .getByName(NetworkUtils.getServerHostName(host)), locatorPort,
new ClientConnectionRequest(Collections.EMPTY_SET, null), 10000);
Assert.assertEquals(expectedLocation, response.getServer());
response = (ClientConnectionResponse) TcpClient.requestToServer(InetAddress
- .getByName(NetworkSupport.getServerHostName(host)), locatorPort,
+ .getByName(NetworkUtils.getServerHostName(host)), locatorPort,
new ClientConnectionRequest(Collections.EMPTY_SET, null), 10000, true);
Assert.assertEquals(expectedLocation, response.getServer());
@@ -153,13 +153,13 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
QueueConnectionResponse response2;
response2 = (QueueConnectionResponse) TcpClient.requestToServer(InetAddress
- .getByName(NetworkSupport.getServerHostName(host)), locatorPort,
+ .getByName(NetworkUtils.getServerHostName(host)), locatorPort,
new QueueConnectionRequest(null, 2,
Collections.EMPTY_SET, null, false), 10000, true);
Assert.assertEquals(Collections.singletonList(expectedLocation), response2.getServers());
response2 = (QueueConnectionResponse) TcpClient
- .requestToServer(InetAddress.getByName(NetworkSupport.getServerHostName(host)),
+ .requestToServer(InetAddress.getByName(NetworkUtils.getServerHostName(host)),
locatorPort, new QueueConnectionRequest(null, 5, Collections.EMPTY_SET, null,
false), 10000, true);
@@ -189,13 +189,13 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
//We expect 0 load
Map expected = new HashMap();
- ServerLocation expectedLocation = new ServerLocation(NetworkSupport.getServerHostName(host), serverPort);
+ ServerLocation expectedLocation = new ServerLocation(NetworkUtils.getServerHostName(host), serverPort);
ServerLoad expectedLoad = new ServerLoad(0f, 1 / 800.0f, 0f, 1f);
expected.put(expectedLocation, expectedLoad);
checkLocatorLoad(vm0, expected);
PoolFactoryImpl pf = new PoolFactoryImpl(null);
- pf.addServer(NetworkSupport.getServerHostName(host), serverPort);
+ pf.addServer(NetworkUtils.getServerHostName(host), serverPort);
pf.setMinConnections(8);
pf.setMaxConnections(8);
pf.setSubscriptionEnabled(true);
@@ -236,7 +236,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
startBridgeServerInVM(vm2, new String[] {"a", "b"}, locators);
PoolFactoryImpl pf = new PoolFactoryImpl(null);
- pf.addLocator(NetworkSupport.getServerHostName(host), locatorPort);
+ pf.addLocator(NetworkUtils.getServerHostName(host), locatorPort);
pf.setMinConnections(80);
pf.setMaxConnections(80);
pf.setSubscriptionEnabled(false);
@@ -324,7 +324,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
startBridgeServerInVM(vm3, new String[] {"b"}, locators);
PoolFactoryImpl pf = new PoolFactoryImpl(null);
- pf.addLocator(NetworkSupport.getServerHostName(host), locatorPort);
+ pf.addLocator(NetworkUtils.getServerHostName(host), locatorPort);
pf.setMinConnections(12);
pf.setSubscriptionEnabled(false);
pf.setServerGroup("a");
@@ -336,7 +336,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
checkConnectionCount(vm2, 6);
checkConnectionCount(vm3, 0);
- LogWriterSupport.getLogWriter().info("pool1 prefilled");
+ LogWriterUtils.getLogWriter().info("pool1 prefilled");
PoolFactoryImpl pf2 = (PoolFactoryImpl) PoolManager.createFactory();
pf2.init(pf.getPoolAttributes());
@@ -350,9 +350,9 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
checkConnectionCount(vm2, 9);
checkConnectionCount(vm3, 9);
- LogWriterSupport.getLogWriter().info("pool2 prefilled");
+ LogWriterUtils.getLogWriter().info("pool2 prefilled");
- ServerLocation location1 = new ServerLocation(NetworkSupport.getServerHostName(host), serverPort1);
+ ServerLocation location1 = new ServerLocation(NetworkUtils.getServerHostName(host), serverPort1);
PoolImpl pool1 = (PoolImpl) PoolManager.getAll().get(POOL_NAME);
Assert.assertEquals("a", pool1.getServerGroup());
@@ -361,7 +361,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
pool1.acquireConnection();
}
- LogWriterSupport.getLogWriter().info("aquired 15 connections in pool1");
+ LogWriterUtils.getLogWriter().info("aquired 15 connections in pool1");
//now the load should be equal
checkConnectionCount(vm1, 9);
@@ -373,7 +373,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
pool2.acquireConnection();
}
- LogWriterSupport.getLogWriter().info("aquired 12 connections in pool2");
+ LogWriterUtils.getLogWriter().info("aquired 12 connections in pool2");
//interleave creating connections in both pools
for(int i = 0; i < 6; i++) {
@@ -381,7 +381,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
pool2.acquireConnection();
}
- LogWriterSupport.getLogWriter().info("interleaved 6 connections from pool1 with 6 connections from pool2");
+ LogWriterUtils.getLogWriter().info("interleaved 6 connections from pool1 with 6 connections from pool2");
//The load should still be balanced
checkConnectionCount(vm1, 13);
@@ -407,8 +407,8 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
int serverPort2 = startBridgeServerInVM(vm2, null, locators, new String[] {REGION_NAME}, new MyLoadProbe(load2 ));
HashMap expected = new HashMap();
- ServerLocation l1 = new ServerLocation(NetworkSupport.getServerHostName(host), serverPort1);
- ServerLocation l2 = new ServerLocation(NetworkSupport.getServerHostName(host), serverPort2);
+ ServerLocation l1 = new ServerLocation(NetworkUtils.getServerHostName(host), serverPort1);
+ ServerLocation l2 = new ServerLocation(NetworkUtils.getServerHostName(host), serverPort2);
expected.put(l1, load1);
expected.put(l2, load2);
checkLocatorLoad(vm0, expected);
@@ -428,7 +428,7 @@ public class LocatorLoadBalancingDUnitTest extends LocatorTestBase {
checkLocatorLoad(vm0, expected);
PoolFactoryImpl pf = new PoolFactoryImpl(null);
- pf.addLocator(NetworkSupport.getServerHostName(host), locatorPort);
+ pf.addLocator(NetworkUtils.getServerHostName(host), locatorPort);
pf.setMinConnections(20);
pf.setSubscriptionEnabled(true);
pf.setIdleTimeout(-1);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorTestBase.java
index fb2411d..88459bf 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/LocatorTestBase.java
@@ -46,8 +46,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -112,14 +112,14 @@ public abstract class LocatorTestBase extends DistributedTestCase {
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, String.valueOf(0));
props.setProperty(DistributionConfig.LOCATORS_NAME, otherLocators);
- props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
try {
File logFile = new File(testName + "-locator" + locatorPort
+ ".log");
InetAddress bindAddr = null;
try {
- bindAddr = InetAddress.getByName(NetworkSupport.getServerHostName(vm.getHost()));
+ bindAddr = InetAddress.getByName(NetworkUtils.getServerHostName(vm.getHost()));
} catch (UnknownHostException uhe) {
Assert.fail("While resolving bind address ", uhe);
}
@@ -306,7 +306,7 @@ public abstract class LocatorTestBase extends DistributedTestCase {
public String getLocatorString(Host host, int[] locatorPorts) {
StringBuffer str = new StringBuffer();
for(int i = 0; i < locatorPorts.length; i++) {
- str.append(NetworkSupport.getServerHostName(host))
+ str.append(NetworkUtils.getServerHostName(host))
.append("[")
.append(locatorPorts[i])
.append("]");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/pooling/ConnectionManagerJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/pooling/ConnectionManagerJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/pooling/ConnectionManagerJUnitTest.java
index 03a170c..d0b2991 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/pooling/ConnectionManagerJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/client/internal/pooling/ConnectionManagerJUnitTest.java
@@ -59,7 +59,7 @@ import com.gemstone.gemfire.internal.cache.PoolStats;
import com.gemstone.gemfire.internal.cache.tier.sockets.ServerQueueStatus;
import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.internal.logging.LocalLogWriter;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@@ -427,7 +427,7 @@ public class ConnectionManagerJUnitTest {
}
for(int i = 0; i < updaterCount; i++) {
- Threads.join(updaters[i], 30 * 1000, null);
+ ThreadUtils.join(updaters[i], 30 * 1000);
}
if(exception.get() !=null) {
@@ -491,7 +491,7 @@ public class ConnectionManagerJUnitTest {
}
for(int i = 0; i < updaterCount; i++) {
- Threads.join(updaters[i], 30 * 1000, null);
+ ThreadUtils.join(updaters[i], 30 * 1000);
}
if(exception.get() !=null) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsDUnitTest.java
index 4c10044..4e4b10f 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsDUnitTest.java
@@ -82,12 +82,12 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -481,7 +481,7 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
verifyListenerValue(server1, MemoryState.EVICTION, 2, true);
verifyListenerValue(server1, MemoryState.NORMAL, 1, true);
- LogWriterSupport.getLogWriter().info("before NORMAL->CRITICAL->NORMAL");
+ LogWriterUtils.getLogWriter().info("before NORMAL->CRITICAL->NORMAL");
//NORMAL -> EVICTION -> NORMAL
server2.invoke(new SerializableCallable("NORMAL->CRITICAL->NORMAL") {
public Object call() throws Exception {
@@ -491,7 +491,7 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
return null;
}
});
- LogWriterSupport.getLogWriter().info("after NORMAL->CRITICAL->NORMAL");
+ LogWriterUtils.getLogWriter().info("after NORMAL->CRITICAL->NORMAL");
verifyListenerValue(server2, MemoryState.CRITICAL, 2, true);
verifyListenerValue(server2, MemoryState.EVICTION, 3, true);
@@ -668,7 +668,7 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
server1.invoke(new SerializableCallable("local destroy sick member") {
public Object call() throws Exception {
Region r = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("PRLocalDestroy");
+ LogWriterUtils.getLogWriter().info("PRLocalDestroy");
r.localDestroyRegion();
return null;
}
@@ -1287,7 +1287,7 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
getCache();
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(server.getHost()), serverPort);
+ pf.addServer(NetworkUtils.getServerHostName(server.getHost()), serverPort);
pf.create("pool1");
AttributesFactory af = new AttributesFactory();
@@ -1358,7 +1358,7 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
assertFalse("Key " + me + " should not exist", r.containsKey(me));
}
} catch (LowMemoryException low) {
- LogWriterSupport.getLogWriter().info("Caught LowMemoryException", low);
+ LogWriterUtils.getLogWriter().info("Caught LowMemoryException", low);
if (!catchLowMemoryException) {
Assert.fail("Unexpected exception: ", low);
}
@@ -1577,7 +1577,7 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
protected Properties getServerProperties() {
Properties p = new Properties();
- p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
return p;
}
@@ -1664,7 +1664,7 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
}
};
final String tenuredPoolName = HeapMemoryMonitor.getTenuredMemoryPoolMXBean().getName();
- LogWriterSupport.getLogWriter().info("TenuredPoolName:"+tenuredPoolName);
+ LogWriterUtils.getLogWriter().info("TenuredPoolName:"+tenuredPoolName);
final List list = internalSystem.getStatsList();
assertFalse(list.isEmpty());
@@ -1674,10 +1674,10 @@ public class MemoryThresholdsDUnitTest extends ClientServerTestCase {
int i=0;
synchronized (list) {
for (Object o : list) {
- LogWriterSupport.getLogWriter().info("List:"+(++i)+":"+o);
+ LogWriterUtils.getLogWriter().info("List:"+(++i)+":"+o);
if (o instanceof StatisticsImpl) {
StatisticsImpl si = (StatisticsImpl)o;
- LogWriterSupport.getLogWriter().info("stat:"+si.getTextId());
+ LogWriterUtils.getLogWriter().info("stat:"+si.getTextId());
if (si.getTextId().contains(tenuredPoolName)) {
sampler.addLocalStatListener(l, si, "currentUsedMemory");
return true;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsOffHeapDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsOffHeapDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsOffHeapDUnitTest.java
index d67d8bc..4a205f3 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsOffHeapDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/management/MemoryThresholdsOffHeapDUnitTest.java
@@ -71,12 +71,12 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -869,7 +869,7 @@ public class MemoryThresholdsOffHeapDUnitTest extends ClientServerTestCase {
vm.invoke(new SerializableCallable("local destroy sick member") {
public Object call() throws Exception {
Region r = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("PRLocalDestroy");
+ LogWriterUtils.getLogWriter().info("PRLocalDestroy");
r.localDestroyRegion();
return null;
}
@@ -1371,7 +1371,7 @@ public class MemoryThresholdsOffHeapDUnitTest extends ClientServerTestCase {
assertFalse("Key " + me + " should not exist", r.containsKey(me));
}
} catch (LowMemoryException low) {
- LogWriterSupport.getLogWriter().info("Caught LowMemoryException", low);
+ LogWriterUtils.getLogWriter().info("Caught LowMemoryException", low);
if (!catchLowMemoryException) {
Assert.fail("Unexpected exception: ", low);
}
@@ -1655,7 +1655,7 @@ public class MemoryThresholdsOffHeapDUnitTest extends ClientServerTestCase {
getCache();
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(server.getHost()), serverPort);
+ pf.addServer(NetworkUtils.getServerHostName(server.getHost()), serverPort);
pf.create("pool1");
AttributesFactory af = new AttributesFactory();
@@ -1816,7 +1816,7 @@ public class MemoryThresholdsOffHeapDUnitTest extends ClientServerTestCase {
private Properties getOffHeapProperties() {
Properties p = new Properties();
- p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
p.setProperty(DistributionConfig.OFF_HEAP_MEMORY_SIZE_NAME, "1m");
return p;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/mapInterface/PutAllGlobalLockJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/mapInterface/PutAllGlobalLockJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/mapInterface/PutAllGlobalLockJUnitTest.java
index 76313d9..2f9be54 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/mapInterface/PutAllGlobalLockJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/mapInterface/PutAllGlobalLockJUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
import com.gemstone.gemfire.distributed.DistributedSystem;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@Category(IntegrationTest.class)
@@ -79,7 +79,7 @@ public class PutAllGlobalLockJUnitTest {
}
try {
testRegion.putAll(trialMap);
- Threads.join(this.thread, 30 * 1000, null);
+ ThreadUtils.join(this.thread, 30 * 1000);
assertTrue(this.testOK);
} catch (Exception e) {
fail("Test has failed due to "+e);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/partition/PartitionRegionHelperDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/partition/PartitionRegionHelperDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/partition/PartitionRegionHelperDUnitTest.java
index 4e4f4b6..d09c6e0 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/partition/PartitionRegionHelperDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/partition/PartitionRegionHelperDUnitTest.java
@@ -47,7 +47,7 @@ import com.gemstone.gemfire.internal.cache.partitioned.fixed.FixedPartitioningTe
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -469,7 +469,7 @@ public class PartitionRegionHelperDUnitTest extends CacheTestCase {
assertTrue(buk0.getBucketAdvisor().isPrimary());
}
catch (ForceReattemptException e) {
- LogWriterSupport.getLogWriter().severe(e);
+ LogWriterUtils.getLogWriter().severe(e);
fail();
}
}
@@ -486,14 +486,14 @@ public class PartitionRegionHelperDUnitTest extends CacheTestCase {
assertNotNull(k1e);
}
catch (ForceReattemptException e) {
- LogWriterSupport.getLogWriter().severe(e);
+ LogWriterUtils.getLogWriter().severe(e);
fail();
}
}
};
for (DistributedMember bom: buk0AllMems) {
VM v = d2v.get(bom);
- LogWriterSupport.getLogWriter().info("Visiting bucket owner member " + bom + " for key " + buk0Key1);
+ LogWriterUtils.getLogWriter().info("Visiting bucket owner member " + bom + " for key " + buk0Key1);
v.invoke(assertHasBucket);
}
@@ -508,14 +508,14 @@ public class PartitionRegionHelperDUnitTest extends CacheTestCase {
assertFalse(buk0.getBucketAdvisor().isPrimary());
}
catch (ForceReattemptException e) {
- LogWriterSupport.getLogWriter().severe(e);
+ LogWriterUtils.getLogWriter().severe(e);
fail();
}
}
};
for (DistributedMember redm: buk0Redundants) {
VM v = d2v.get(redm);
- LogWriterSupport.getLogWriter().info("Visiting redundant member " + redm + " for key " + buk0Key1);
+ LogWriterUtils.getLogWriter().info("Visiting redundant member " + redm + " for key " + buk0Key1);
v.invoke(assertRed);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/CompactRangeIndexDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/CompactRangeIndexDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/CompactRangeIndexDUnitTest.java
index b4988c0..3ce0ce2 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/CompactRangeIndexDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/CompactRangeIndexDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable.CacheSerializableR
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -55,7 +55,7 @@ public class CompactRangeIndexDUnitTest extends DistributedTestCase{
Host host = Host.getHost(0);
vm0 = host.getVM(0);
utils = new QueryTestUtils();
- utils.createServer(vm0, DistributedTestSupport.getAllDistributedSystemProperties(new Properties()));
+ utils.createServer(vm0, DistributedTestUtils.getAllDistributedSystemProperties(new Properties()));
utils.createReplicateRegion("exampleRegion", vm0);
utils.createIndex(vm0,"type", "\"type\"", "/exampleRegion");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HashIndexDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HashIndexDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HashIndexDUnitTest.java
index a5ed9a2..c73aa80 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HashIndexDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HashIndexDUnitTest.java
@@ -22,7 +22,7 @@ import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.query.QueryTestUtils;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -48,7 +48,7 @@ public class HashIndexDUnitTest extends DistributedTestCase{
Host host = Host.getHost(0);
vm0 = host.getVM(0);
utils = new QueryTestUtils();
- utils.createServer(vm0, DistributedTestSupport.getAllDistributedSystemProperties(new Properties()));
+ utils.createServer(vm0, DistributedTestUtils.getAllDistributedSystemProperties(new Properties()));
utils.createReplicateRegion("exampleRegion", vm0);
utils.createHashIndex(vm0,"ID", "r.ID", "/exampleRegion r");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HelperTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HelperTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HelperTestCase.java
index 6461333..f4132da 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HelperTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/HelperTestCase.java
@@ -46,7 +46,7 @@ import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -240,7 +240,7 @@ public class HelperTestCase extends CacheTestCase {
final ClientCacheFactory ccf = new ClientCacheFactory(properties);
for (int i = 0; i < servers.length; i++) {
- ccf.addPoolServer(NetworkSupport.getServerHostName(servers[i].getHost()), ports[i]);
+ ccf.addPoolServer(NetworkUtils.getServerHostName(servers[i].getHost()), ports[i]);
}
ccf.setPoolSubscriptionEnabled(true);
ccf.setPoolSubscriptionRedundancy(redundancyLevel);
[21/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHAFailureAndRecoveryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHAFailureAndRecoveryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHAFailureAndRecoveryDUnitTest.java
index e700fa7..69bebdf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHAFailureAndRecoveryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionHAFailureAndRecoveryDUnitTest.java
@@ -36,10 +36,10 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -90,7 +90,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
final int redundancy = 1;
createPartitionRegionAsynch("testMetaDataCleanupOnSinglePRNodeFail_",
startIndexForRegion, endIndexForRegion, localMaxMemory, redundancy, -1);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnSinglePRNodeFail() - PartitionedRegion's created at all VM nodes");
@@ -100,7 +100,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
// disconnect vm0.
DistributedMember dsMember = (DistributedMember)vmArr[0].invoke(this, "disconnectMethod");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testMetaDataCleanupOnSinglePRNodeFail() - VM = " + dsMember
+ " disconnected from the distributed system ");
@@ -108,7 +108,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
vmArr[1].invoke(validateNodeFailMetaDataCleanUp(dsMember));
vmArr[2].invoke(validateNodeFailMetaDataCleanUp(dsMember));
vmArr[3].invoke(validateNodeFailMetaDataCleanUp(dsMember));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnSinglePRNodeFail() - Validation of Failed node config metadata complete");
@@ -117,11 +117,11 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
vmArr[2].invoke(validateNodeFailbucket2NodeCleanUp(dsMember));
vmArr[3].invoke(validateNodeFailbucket2NodeCleanUp(dsMember));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnSinglePRNodeFail() - Validation of Failed node bucket2Node Region metadata complete");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnSinglePRNodeFail() Completed Successfuly ..........");
}
@@ -136,7 +136,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
Cache c = getCache();
Region rootReg = PartitionedRegionHelper.getPRRoot(c);
// Region allPRs = PartitionedRegionHelper.getPRConfigRegion(rootReg, c);
- rootReg.getAttributesMutator().addCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ rootReg.getAttributesMutator().addCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
}
};
@@ -199,7 +199,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
final int redundancy = 1;
createPartitionRegionAsynch("testMetaDataCleanupOnMultiplePRNodeFail_",
startIndexForRegion, endIndexForRegion, localMaxMemory, redundancy, -1);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnMultiplePRNodeFail() - PartitionedRegion's created at all VM nodes");
@@ -208,7 +208,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
// disconnect vm0
DistributedMember dsMember = (DistributedMember)vmArr[0].invoke(this, "disconnectMethod");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testMetaDataCleanupOnMultiplePRNodeFail() - VM = " + dsMember
+ " disconnected from the distributed system ");
@@ -231,7 +231,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
// disconnect vm1
DistributedMember dsMember2 = (DistributedMember)vmArr[1].invoke(this, "disconnectMethod");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testMetaDataCleanupOnMultiplePRNodeFail() - VM = " + dsMember2
+ " disconnected from the distributed system ");
@@ -254,18 +254,18 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
vmArr[2].invoke(validateNodeFailMetaDataCleanUp(dsMember2));
vmArr[3].invoke(validateNodeFailMetaDataCleanUp(dsMember2));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnMultiplePRNodeFail() - Validation of Failed nodes config metadata complete");
vmArr[2].invoke(validateNodeFailbucket2NodeCleanUp(dsMember2));
vmArr[3].invoke(validateNodeFailbucket2NodeCleanUp(dsMember2));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnMultiplePRNodeFail() - Validation of Failed nodes bucket2Node Region metadata complete");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testMetaDataCleanupOnMultiplePRNodeFail() Completed Successfuly ..........");
}
@@ -293,8 +293,8 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
assertEquals(2, cls.length);
CertifiableTestCacheListener ctcl = (CertifiableTestCacheListener) cls[1];
- LogWriterSupport.getLogWriter().info("Listener update (" + ctcl.updates.size() + "): " + ctcl.updates) ;
- LogWriterSupport.getLogWriter().info("Listener destroy: (" + ctcl.destroys.size() + "): " + ctcl.destroys) ;
+ LogWriterUtils.getLogWriter().info("Listener update (" + ctcl.updates.size() + "): " + ctcl.updates) ;
+ LogWriterUtils.getLogWriter().info("Listener destroy: (" + ctcl.destroys.size() + "): " + ctcl.destroys) ;
Iterator itrator = rootReg.keySet().iterator();
for (Iterator itr = itrator; itr.hasNext();) {
@@ -381,7 +381,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
DistributedMember dsMember = ((InternalDistributedSystem)getCache()
.getDistributedSystem()).getDistributionManager().getId();
getCache().getDistributedSystem().disconnect();
- LogWriterSupport.getLogWriter().info("disconnectMethod() completed ..");
+ LogWriterUtils.getLogWriter().info("disconnectMethod() completed ..");
return dsMember;
}
@@ -399,7 +399,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
redundancy, localMaxMemory, recoveryDelay));
}
for (int count2 = 0; count2 < async.length; count2++) {
- Threads.join(async[count2], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count2], 30 * 1000);
}
for (int count2 = 0; count2 < async.length; count2++) {
@@ -449,7 +449,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
assertEquals(bucketOwners.size(), redundantCopies + 1);
DistributedMember bucketOwner = (DistributedMember) bucketOwners.iterator().next();
assertNotNull(bucketOwner);
- LogWriterSupport.getLogWriter().info("Selected distributed member " + bucketOwner + " to disconnect because it hosts bucketId " + bucketId);
+ LogWriterUtils.getLogWriter().info("Selected distributed member " + bucketOwner + " to disconnect because it hosts bucketId " + bucketId);
return bucketOwner;
}
});
@@ -459,7 +459,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
Map stillHasDS = Invoke.invokeInEveryVM(new SerializableCallable("Disconnect provided bucketHost") {
public Object call() throws Exception {
if (getSystem().getDistributedMember().equals(bucketHost)) {
- LogWriterSupport.getLogWriter().info("Disconnecting distributed member " + getSystem().getDistributedMember());
+ LogWriterUtils.getLogWriter().info("Disconnecting distributed member " + getSystem().getDistributedMember());
disconnectFromDS();
return Boolean.FALSE;
}
@@ -522,7 +522,7 @@ public class PartitionedRegionHAFailureAndRecoveryDUnitTest extends
assertEquals(pr.getRedundantCopies() + 1, owners.size());
break; // retry loop
} catch (ForceReattemptException retryIt) {
- LogWriterSupport.getLogWriter().info("Need to retry validation for bucket in PR " + pr, retryIt);
+ LogWriterUtils.getLogWriter().info("Need to retry validation for bucket in PR " + pr, retryIt);
}
} while (true); // retry loop
} // bucketId loop
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionLocalMaxMemoryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionLocalMaxMemoryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionLocalMaxMemoryDUnitTest.java
index b5b090b..28e1bfb 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionLocalMaxMemoryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionLocalMaxMemoryDUnitTest.java
@@ -34,7 +34,7 @@ import com.gemstone.gemfire.cache.util.ObjectSizer;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.internal.cache.lru.Sizeable;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -178,7 +178,7 @@ public class PartitionedRegionLocalMaxMemoryDUnitTest extends
i++;
}
assertEquals(1, pr.getDataStore().localBucket2RegionMap.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"putObjectInPartitionRegion() - Put operation done successfully");
}
else {
@@ -191,7 +191,7 @@ public class PartitionedRegionLocalMaxMemoryDUnitTest extends
fail("Bucket gets created even if no memory is available");
}
catch (PartitionedRegionStorageException e) {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"putObjectInPartitionRegion()- got correct PartitionedRegionStorageException while creating bucket when no memory is available");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionMultipleDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionMultipleDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionMultipleDUnitTest.java
index 82aa308..22d1fd7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionMultipleDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionMultipleDUnitTest.java
@@ -23,8 +23,8 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -95,22 +95,22 @@ public class PartitionedRegionMultipleDUnitTest extends
/** creationg and performing put(),get() operations on Partition Region */
createMultiplePartitionRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionPutAndGet() - Partition Regions Successfully Created ");
validateMultiplePartitionedRegions(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionPutAndGet() - Partition Regions Successfully Validated ");
putInMultiplePartitionedRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionPutAndGet() - Put() Operation done Successfully in Partition Regions ");
getInMultiplePartitionedRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionPutAndGet() - Partition Regions Successfully Validated ");
}
@@ -150,38 +150,38 @@ public class PartitionedRegionMultipleDUnitTest extends
*/
createMultiplePartitionRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyKeys() - Partition Regions Successfully Created ");
validateMultiplePartitionedRegions(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyKeys() - Partition Regions Successfully Validated ");
putInMultiplePartitionedRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyKeys() - Put() Operation done Successfully in Partition Regions ");
destroyInMultiplePartitionedRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyKeys() - Destroy(Key) Operation done Successfully in Partition Regions ");
getDestroyedEntryInMultiplePartitionedRegion(vm0, vm1, vm2, vm3,
startIndexForRegion, endIndexForRegion, afterPutFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyKeys() - Get() Operation after destoy keys done Successfully in Partition Regions ");
putDestroyedEntryInMultiplePartitionedRegion(vm0, vm1, vm2, vm3,
startIndexForRegion, endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyKeys() - Put() Operation after destroy keys done Successfully in Partition Regions ");
afterPutFlag = 1;
getDestroyedEntryInMultiplePartitionedRegion(vm0, vm1, vm2, vm3,
startIndexForRegion, endIndexForRegion, afterPutFlag);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyKeys() - Get() Operation after Put() done Successfully in Partition Regions ");
}
@@ -212,22 +212,22 @@ public class PartitionedRegionMultipleDUnitTest extends
/** creating Partition Regions and testing for the APIs contains() */
createMultiplePartitionRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyAndContainsAPI() - Partition Regions Successfully Created ");
validateMultiplePartitionedRegions(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyAndContainsAPI() - Partition Regions Successfully Validated ");
putInMultiplePartitionedRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyAndContainsAPI() - Put() Operation done Successfully in Partition Regions ");
destroyInMultiplePartitionedRegion(vm0, vm1, vm2, vm3, startIndexForRegion,
endIndexForRegion);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyAndContainsAPI() - Destroy(Key) Operation done Successfully in Partition Regions ");
async[0] = vm0.invokeAsync(validateContainsAPIForPartitionRegion(
@@ -240,7 +240,7 @@ public class PartitionedRegionMultipleDUnitTest extends
startIndexForRegion, endIndexForRegion));
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 120 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 120 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -249,7 +249,7 @@ public class PartitionedRegionMultipleDUnitTest extends
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPartitionedRegionDestroyAndContainsAPI() - Validation of Contains APIs done Successfully in Partition Regions ");
}
@@ -300,7 +300,7 @@ public class PartitionedRegionMultipleDUnitTest extends
startIndexForRegion, endIndexForRegion));
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -336,7 +336,7 @@ public class PartitionedRegionMultipleDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -369,7 +369,7 @@ public class PartitionedRegionMultipleDUnitTest extends
endIndexForRegion));
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -408,7 +408,7 @@ public class PartitionedRegionMultipleDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -466,7 +466,7 @@ public class PartitionedRegionMultipleDUnitTest extends
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"validateContainsAPIForPartitionRegion() - Get() Validations done Successfully in Partition Region "
+ pr.getName());
@@ -481,7 +481,7 @@ public class PartitionedRegionMultipleDUnitTest extends
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"validateContainsAPIForPartitionRegion() - containsKey() Validations done Successfully in Partition Region "
+ pr.getName());
@@ -496,7 +496,7 @@ public class PartitionedRegionMultipleDUnitTest extends
assertTrue(conKey);
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"validateContainsAPIForPartitionRegion() - containsValueForKey() Validations done Successfully in Partition Region "
+ pr.getName());
@@ -510,7 +510,7 @@ public class PartitionedRegionMultipleDUnitTest extends
assertTrue(conKey);
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"validateContainsAPIForPartitionRegion() - containsValue() Validations done Successfully in Partition Region "
+ pr.getName());
@@ -545,7 +545,7 @@ public class PartitionedRegionMultipleDUnitTest extends
startIndexForRegion, endIndexForRegion, afterPutFlag));
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
if (async[count].exceptionOccurred()) {
Assert.fail("exception during " + count, async[count].getException());
}
@@ -588,7 +588,7 @@ public class PartitionedRegionMultipleDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionPRIDDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionPRIDDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionPRIDDUnitTest.java
index e7dc716..f35b39a 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionPRIDDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionPRIDDUnitTest.java
@@ -26,8 +26,8 @@ import com.gemstone.gemfire.cache30.*;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -85,7 +85,7 @@ public class PartitionedRegionPRIDDUnitTest extends
// Create 1/2 * MAX_REGIONS regions in VM 0,1,2 with scope D_ACK.
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, redundancy, prPrefix);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPRIDGenerationInMultiplePartitionRegion() - Partition regions on 3 nodes successfully created");
@@ -101,7 +101,7 @@ public class PartitionedRegionPRIDDUnitTest extends
// VM 3 contains regions from id MAX_REGIONS to 2*MAX_REGIONS only.
createPartitionRegion(vmList, startIndexForRegion, endIndexForRegion,
localMaxMemory, pr2_redundancy, prPrefix);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testPRIDGenerationInMultiplePartitionRegion() - Partition regions on 4 nodes successfully created");
// validating PRID generation for multiple partition regions
@@ -118,7 +118,7 @@ public class PartitionedRegionPRIDDUnitTest extends
/** main thread is waiting for the other threads to complete */
for (int count = 0; count < AsyncInvocationArrSize; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
}
for (int count = 0; count < AsyncInvocationArrSize; count++) {
@@ -208,10 +208,10 @@ public class PartitionedRegionPRIDDUnitTest extends
if (prIdPRSet.size() != PartitionedRegion.prIdToPR.size())
fail("Duplicate PRID are generated in prIdToPR");
- LogWriterSupport.getLogWriter().info("Size of allPartition region : " + prIdSet.size());
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter().info("Size of allPartition region : " + prIdSet.size());
+ LogWriterUtils.getLogWriter()
.info("Size of prIdToPR region : " + prIdPRSet.size());
- LogWriterSupport.getLogWriter().info("PRID generated successfully");
+ LogWriterUtils.getLogWriter().info("PRID generated successfully");
}
};
return validatePRID;
@@ -235,7 +235,7 @@ public class PartitionedRegionPRIDDUnitTest extends
numNodes++;
}
for (int i = 0; i < numNodes; i++) {
- Threads.join(async[i], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[i], 30 * 1000);
}
for (int i = 0; i < numNodes; i++) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionQueryDUnitTest.java
index 415e709..42b34dd 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionQueryDUnitTest.java
@@ -60,8 +60,8 @@ import com.gemstone.gemfire.internal.cache.partitioned.QueryMessage;
import com.gemstone.gemfire.pdx.JSONFormatter;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -944,8 +944,8 @@ public class PartitionedRegionQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port1);
- cf.addPoolServer(NetworkSupport.getServerHostName(server2.getHost()), port2);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server2.getHost()), port2);
ClientCache cache = getClientCache(cf);
Region region = cache.createClientRegionFactory(
@@ -983,12 +983,12 @@ public class PartitionedRegionQueryDUnitTest extends CacheTestCase {
SerializableRunnable closeCache = new CacheSerializableRunnable(
"Close Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close Client. ###");
+ LogWriterUtils.getLogWriter().info("### Close Client. ###");
try {
closeCache();
disconnectFromDS();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("### Failed to get close client. ###");
+ LogWriterUtils.getLogWriter().info("### Failed to get close client. ###");
}
}
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSerializableObjectJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSerializableObjectJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSerializableObjectJUnitTest.java
index e8e881f..53c219f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSerializableObjectJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSerializableObjectJUnitTest.java
@@ -35,7 +35,7 @@ import static org.junit.Assert.*;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.distributed.DistributedSystem;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
import junit.framework.TestCase;
@@ -82,7 +82,7 @@ public class PartitionedRegionSerializableObjectJUnitTest
for (int i = 0; i < MAX_THREADS; i++) {
threadArr[i].start();
- Threads.join(threadArr[i], 30 * 1000, null);
+ ThreadUtils.join(threadArr[i], 30 * 1000);
}
for (int i = 0; i < MAX_THREADS; i++) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopDUnitTest.java
index 1c14c36..55fed01 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopDUnitTest.java
@@ -67,11 +67,11 @@ import com.gemstone.gemfire.internal.cache.execute.data.ShipmentId;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -130,7 +130,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
}
finally {
- DistributedTestSupport.unregisterAllDataSerializersFromAllVms();
+ DistributedTestUtils.unregisterAllDataSerializersFromAllVms();
}
}
@@ -196,7 +196,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -210,7 +210,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -224,7 +224,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -238,7 +238,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
return port;
@@ -603,7 +603,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
public void test_SingleHopWithHAWithLocator() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -989,7 +989,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
for (Entry entry : clientMap.entrySet()) {
List list = (List)entry.getValue();
if(list.size()<4){
- LogWriterSupport.getLogWriter().info("still waiting for 4 bucket owners in " + entry.getKey() + ": " + list);
+ LogWriterUtils.getLogWriter().info("still waiting for 4 bucket owners in " + entry.getKey() + ": " + list);
finished = false;
break;
}
@@ -1090,7 +1090,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
member2.invoke(PartitionedRegionSingleHopDUnitTest.class, "verifyMetadata", new Object[]{fclientMap});
member3.invoke(PartitionedRegionSingleHopDUnitTest.class, "verifyMetadata", new Object[]{fclientMap});
} catch (Exception e) {
- LogWriterSupport.getLogWriter().info("verification failed", e);
+ LogWriterUtils.getLogWriter().info("verification failed", e);
return false;
}
return true;
@@ -1414,7 +1414,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1428,7 +1428,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1441,7 +1441,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1454,7 +1454,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
replicatedRegion = cache.createRegion("rr", new AttributesFactory().create());
@@ -1484,7 +1484,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1498,7 +1498,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1512,7 +1512,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1526,7 +1526,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
@@ -1554,7 +1554,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1570,7 +1570,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1586,7 +1586,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1602,7 +1602,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
@@ -1638,7 +1638,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1654,7 +1654,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1670,7 +1670,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1686,7 +1686,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
// attr.setConcurrencyChecksEnabled(true);
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
@@ -1724,7 +1724,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1738,7 +1738,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1752,7 +1752,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1766,7 +1766,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
@@ -1802,7 +1802,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1815,7 +1815,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1828,7 +1828,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1841,7 +1841,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attr.setConcurrencyChecksEnabled(true);
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
replicatedRegion = cache.createRegion("rr", new AttributesFactory().create());
@@ -1957,7 +1957,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
RegionAttributes attrs = factory.create();
region = cache.createRegion(PR_NAME, attrs);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1968,7 +1968,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attrs = factory.create();
customerRegion = cache.createRegion("CUSTOMER", attrs);
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1979,7 +1979,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attrs = factory.create();
orderRegion = cache.createRegion("ORDER", attrs);
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1990,7 +1990,7 @@ public class PartitionedRegionSingleHopDUnitTest extends CacheTestCase {
attrs = factory.create();
shipmentRegion = cache.createRegion("SHIPMENT", attrs);
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
factory = new AttributesFactory();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopWithServerGroupDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopWithServerGroupDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopWithServerGroupDUnitTest.java
index 9f21031..87738b8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopWithServerGroupDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSingleHopWithServerGroupDUnitTest.java
@@ -42,12 +42,12 @@ import com.gemstone.gemfire.internal.cache.execute.data.OrderId;
import com.gemstone.gemfire.internal.cache.execute.data.ShipmentId;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -157,7 +157,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
}
finally {
- DistributedTestSupport.unregisterAllDataSerializersFromAllVms();
+ DistributedTestUtils.unregisterAllDataSerializersFromAllVms();
}
}
@@ -177,7 +177,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWith2ServerGroup() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -211,7 +211,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWith2ServerGroup2() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -245,7 +245,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWith2ServerGroup2WithoutSystemProperty() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -274,7 +274,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupAccessor() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -307,7 +307,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupOneServerInTwoGroups() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -346,7 +346,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupWithOneDefaultServer() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -379,7 +379,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupClientServerGroupNull() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -412,7 +412,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupTwoClientServerGroup() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -459,7 +459,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupTwoClientServerGroup2() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -503,7 +503,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupTwoClientOneWithOneWithoutServerGroup() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -542,7 +542,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroup2ClientInOneVMServerGroup() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -586,7 +586,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
public void test_SingleHopWithServerGroupColocatedRegionsInDifferentGroup() {
int port3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String host0 = NetworkSupport.getServerHostName(member3.getHost());
+ final String host0 = NetworkUtils.getServerHostName(member3.getHost());
final String locator = host0 + "[" + port3 + "]";
member3.invoke(PartitionedRegionSingleHopWithServerGroupDUnitTest.class,
"startLocatorInVM", new Object[] { port3 });
@@ -827,7 +827,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -840,7 +840,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -853,7 +853,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -866,7 +866,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
return port;
@@ -913,7 +913,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -926,7 +926,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -939,7 +939,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -952,7 +952,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
return port;
@@ -999,7 +999,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1012,7 +1012,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1025,7 +1025,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1038,7 +1038,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
@@ -1051,7 +1051,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
region2 = cache.createRegion(PR_NAME2, attr.create());
assertNotNull(region2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME2 + " created Successfully :"
+ region2.toString());
@@ -1064,7 +1064,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
customerRegion2 = cache.createRegion(CUSTOMER2, attr.create());
assertNotNull(customerRegion2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER2 created Successfully :"
+ customerRegion2.toString());
@@ -1077,7 +1077,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
orderRegion2 = cache.createRegion(ORDER2, attr.create());
assertNotNull(orderRegion2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER2 created Successfully :"
+ orderRegion2.toString());
@@ -1090,7 +1090,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
shipmentRegion2 = cache.createRegion(SHIPMENT2, attr.create());
assertNotNull(shipmentRegion2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT2 created Successfully :"
+ shipmentRegion2.toString());
@@ -1193,7 +1193,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
RegionAttributes attrs = factory.create();
region = cache.createRegion(PR_NAME, attrs);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1202,7 +1202,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
customerRegion = cache.createRegion("CUSTOMER", attrs);
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1211,7 +1211,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
orderRegion = cache.createRegion("ORDER", attrs);
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1220,7 +1220,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
shipmentRegion = cache.createRegion("SHIPMENT", attrs);
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
}
@@ -1232,7 +1232,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
RegionAttributes attrs = factory.create();
region = cache.createRegion(PR_NAME, attrs);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1241,7 +1241,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
customerRegion = cache.createRegion("CUSTOMER", attrs);
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1250,7 +1250,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
orderRegion = cache.createRegion("ORDER", attrs);
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1259,7 +1259,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
shipmentRegion = cache.createRegion("SHIPMENT", attrs);
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
@@ -1270,7 +1270,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
region2 = cache.createRegion(PR_NAME2, attrs);
assertNotNull(region2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + PR_NAME2 + " created Successfully :"
+ region2.toString());
@@ -1279,7 +1279,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
customerRegion2 = cache.createRegion(CUSTOMER2, attrs);
assertNotNull(customerRegion2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region CUSTOMER2 created Successfully :"
+ customerRegion2.toString());
@@ -1288,7 +1288,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
orderRegion2 = cache.createRegion(ORDER2, attrs);
assertNotNull(orderRegion2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region ORDER2 created Successfully :"
+ orderRegion2.toString());
@@ -1297,7 +1297,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
shipmentRegion2 = cache.createRegion(SHIPMENT2, attrs);
assertNotNull(shipmentRegion2);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region SHIPMENT2 created Successfully :"
+ shipmentRegion2.toString());
}
@@ -1309,7 +1309,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
RegionAttributes attrs = factory.create();
region = cache.createRegion(PR_NAME, attrs);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1318,7 +1318,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
customerRegion = cache.createRegion("CUSTOMER", attrs);
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1327,7 +1327,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
orderRegion = cache.createRegion("ORDER", attrs);
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1336,7 +1336,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attrs = factory.create();
shipmentRegion = cache.createRegion("SHIPMENT", attrs);
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
@@ -1368,7 +1368,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
region = cache.createRegion(PR_NAME, attr.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + PR_NAME + " created Successfully :"
+ region.toString());
@@ -1381,7 +1381,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
customerRegion = cache.createRegion("CUSTOMER", attr.create());
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1393,7 +1393,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
orderRegion = cache.createRegion("ORDER", attr.create());
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1405,7 +1405,7 @@ public class PartitionedRegionSingleHopWithServerGroupDUnitTest extends CacheTes
attr.setPartitionAttributes(paf.create());
shipmentRegion = cache.createRegion("SHIPMENT", attr.create());
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
return port;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSizeDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSizeDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSizeDUnitTest.java
index 05f595c..cda653a 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSizeDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionSizeDUnitTest.java
@@ -34,10 +34,10 @@ import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -120,7 +120,7 @@ public class PartitionedRegionSizeDUnitTest extends
public void run2()
{
Cache cache = getCache();
- final int oldLevel = setLogLevel(LogWriterSupport.getLogWriter(), InternalLogWriter.WARNING_LEVEL);
+ final int oldLevel = setLogLevel(LogWriterUtils.getLogWriter(), InternalLogWriter.WARNING_LEVEL);
for (int j = 0; j < MAX_REGIONS; j++) {
Region pr = cache.getRegion(Region.SEPARATOR + PR_PREFIX
+ "DistAckSync" + j);
@@ -130,7 +130,7 @@ public class PartitionedRegionSizeDUnitTest extends
pr.put(key, value);
}
}
- setLogLevel(LogWriterSupport.getLogWriter(), oldLevel);
+ setLogLevel(LogWriterUtils.getLogWriter(), oldLevel);
}
});
@@ -222,7 +222,7 @@ public class PartitionedRegionSizeDUnitTest extends
public void run2()
{
Cache cache = getCache();
- final int oldLevel = setLogLevel(LogWriterSupport.getLogWriter(), InternalLogWriter.WARNING_LEVEL);
+ final int oldLevel = setLogLevel(LogWriterUtils.getLogWriter(), InternalLogWriter.WARNING_LEVEL);
for (int j = 0; j < MAX_REGIONS; j++) {
Region pr = cache.getRegion(Region.SEPARATOR + PR_PREFIX
+ "DistAckASync" + j);
@@ -232,11 +232,11 @@ public class PartitionedRegionSizeDUnitTest extends
pr.put(key, value);
}
}
- setLogLevel(LogWriterSupport.getLogWriter(), oldLevel);
+ setLogLevel(LogWriterUtils.getLogWriter(), oldLevel);
}
});
- Threads.join(async0, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 30 * 1000);
if (async0.exceptionOccurred()) {
Assert.fail("Exception during async0", async0.getException());
@@ -326,7 +326,7 @@ public class PartitionedRegionSizeDUnitTest extends
public void run2()
{
Cache cache = getCache();
- final int oldLevel = setLogLevel(LogWriterSupport.getLogWriter(), InternalLogWriter.WARNING_LEVEL);
+ final int oldLevel = setLogLevel(LogWriterUtils.getLogWriter(), InternalLogWriter.WARNING_LEVEL);
for (int j = 0; j < MAX_REGIONS; j++) {
Region pr = cache.getRegion(Region.SEPARATOR + PR_PREFIX
+ "DistAckSyncChangingVMCount" + j);
@@ -336,7 +336,7 @@ public class PartitionedRegionSizeDUnitTest extends
pr.put(key, value);
}
}
- setLogLevel(LogWriterSupport.getLogWriter(), oldLevel);
+ setLogLevel(LogWriterUtils.getLogWriter(), oldLevel);
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionTestUtilsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionTestUtilsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionTestUtilsDUnitTest.java
index a30ff47..0690a6d 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionTestUtilsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/PartitionedRegionTestUtilsDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -92,7 +92,7 @@ public class PartitionedRegionTestUtilsDUnitTest extends
GsRandom rand = new GsRandom(123);
// Assert that its empty
for(int i=0; i<5; i++) {
- LogWriterSupport.getLogWriter().info("Invocation " + i + " of getSomeKeys");
+ LogWriterUtils.getLogWriter().info("Invocation " + i + " of getSomeKeys");
try {
Set s = null;
s = pr.getSomeKeys(rand);
@@ -112,17 +112,17 @@ public class PartitionedRegionTestUtilsDUnitTest extends
// Assert not empty and has value in an accepable range
for(int i=0; i<5; i++) {
- LogWriterSupport.getLogWriter().info("Invocation " + i + " of getSomeKeys");
+ LogWriterUtils.getLogWriter().info("Invocation " + i + " of getSomeKeys");
try {
Set s = null;
s = pr.getSomeKeys(rand);
assertNotNull(s);
assertFalse(s.isEmpty());
Integer val;
- LogWriterSupport.getLogWriter().info("Invocation " + i + " got " + s.size() + " keys");
+ LogWriterUtils.getLogWriter().info("Invocation " + i + " got " + s.size() + " keys");
for (Iterator it = s.iterator(); it.hasNext(); ) {
Object key = it.next();
- LogWriterSupport.getLogWriter().info("Key: " + key);
+ LogWriterUtils.getLogWriter().info("Key: " + key);
val = (Integer) pr.get(key);
assertNotNull(val);
assertTrue(val.intValue() >= 0);
[29/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRColocatedEquiJoinDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRColocatedEquiJoinDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRColocatedEquiJoinDUnitTest.java
index 79ac200..e6b17e7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRColocatedEquiJoinDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRColocatedEquiJoinDUnitTest.java
@@ -55,7 +55,7 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -102,40 +102,40 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedCreate(coloName,
redundancy, name));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -161,7 +161,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -170,7 +170,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -181,29 +181,29 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -227,7 +227,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
partitionedregion = cache.createRegion(coloName, attr.create());
}
catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.warning(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRCreateWithRedundancy: Creation caught IllegalStateException",
ex);
@@ -244,11 +244,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
}
});
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -274,7 +274,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -360,7 +360,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
} catch (FunctionException e) {
if (e.getCause() instanceof UnsupportedOperationException) {
- LogWriterSupport.getLogWriter().info("Query received FunctionException successfully while using QueryService.");
+ LogWriterUtils.getLogWriter().info("Query received FunctionException successfully while using QueryService.");
} else {
fail("UnsupportedOperationException must be thrown here");
}
@@ -368,7 +368,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
}
});
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -385,13 +385,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -400,16 +400,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -418,11 +418,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex2", "r2.id", "/"+coloName+" r2", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex22", "r2.status", "/"+coloName+" r2", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -448,7 +448,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -457,7 +457,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -474,13 +474,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -489,27 +489,27 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedCreate(coloName,
redundancy, name));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -535,7 +535,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -544,7 +544,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -561,39 +561,39 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(coloName, NewPortfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -619,7 +619,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -628,7 +628,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -645,13 +645,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -660,16 +660,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -678,11 +678,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex2", "r2.id", "/"+coloName+" r2", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex22", "r2.status", "/"+coloName+" r2", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -708,7 +708,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -717,7 +717,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -734,13 +734,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -749,26 +749,26 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(coloName, NewPortfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -794,7 +794,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -803,7 +803,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -820,39 +820,39 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(coloName,
redundancy, NewPortfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(name, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -878,7 +878,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -887,7 +887,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -904,13 +904,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -918,16 +918,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, NewPortfolio.class));
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex1", "r2.id", "/"+coloName+" r2", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -935,11 +935,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex2", "r1.ID", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -965,7 +965,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -974,7 +974,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -991,29 +991,29 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(coloName,
redundancy, NewPortfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -1021,11 +1021,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -1051,7 +1051,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -1060,7 +1060,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedDataSetQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -1078,13 +1078,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -1092,16 +1092,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, Portfolio.class));
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -1109,11 +1109,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, name));
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRColocatedCreate(coloName,
redundancy, name));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -1139,7 +1139,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -1201,7 +1201,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (QueryException e) {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -1209,11 +1209,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (UnsupportedOperationException uso) {
- LogWriterSupport.getLogWriter().info(uso.getMessage());
+ LogWriterUtils.getLogWriter().info(uso.getMessage());
if (!uso.getMessage().equalsIgnoreCase(LocalizedStrings.DefaultQuery_A_QUERY_ON_A_PARTITIONED_REGION_0_MAY_NOT_REFERENCE_ANY_OTHER_REGION_1.toLocalizedString(new Object[] {name, "/"+coloName}))) {
fail("Query did not throw UnsupportedOperationException while using QueryService instead of LocalQueryService");
} else {
- LogWriterSupport.getLogWriter().info("Query received UnsupportedOperationException successfully while using QueryService.");
+ LogWriterUtils.getLogWriter().info("Query received UnsupportedOperationException successfully while using QueryService.");
}
} finally {
for (int i = 0; i < expectedExceptions.length; i++) {
@@ -1225,7 +1225,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
}
});
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -1235,13 +1235,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -1250,16 +1250,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -1268,11 +1268,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex2", "r2.id", "/"+coloName+" r2, r2.positions.values pos2", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex22", "r2.status", "/"+coloName+" r2", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -1298,7 +1298,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -1307,7 +1307,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAndRRQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -1318,13 +1318,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -1333,16 +1333,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex1", "r2.id", "/"+coloName+" r2", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -1351,11 +1351,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex2", "r1.ID", "/"+name+" r1, r1.positions.values pos1", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex22", "r2.status", "/"+coloName+" r2", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -1381,7 +1381,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -1390,7 +1390,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForRRAndPRQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -1400,13 +1400,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -1415,16 +1415,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -1433,11 +1433,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex2", "pos2.id", "/"+coloName+" r2, r2.positions.values pos2", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex22", "r2.status", "/"+coloName+" r2", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -1463,7 +1463,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -1472,7 +1472,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAndRRQueryWithCompactAndRangeIndexAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -1482,13 +1482,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -1497,16 +1497,16 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex1", "r1.ID", "/"+name+" r1", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(name, "IdIndex11", "r1.status", "/"+name+" r1", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the PR");
@@ -1515,11 +1515,11 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex2", "r2.id", "/"+coloName+" r2", null));
//vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRIndexCreate(coloName, "IdIndex22", "r2.status", "/"+coloName+" r2", null));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the Colocated DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -1548,7 +1548,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -1560,7 +1560,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAndRRQueryAndCompareResults(name, coloName, localName, coloLocalName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
@@ -1579,13 +1579,13 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR Test with DACK Started");
// Creting PR's on the participating VM's
// Creating DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the DataStore node in the PR");
@@ -1593,22 +1593,22 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
0, Portfolio.class));
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
0, Portfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully created the DataStore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
// Creating Colocated Region DataStore node on the VM0.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Creating the Colocated DataStore node in the RR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForLocalRegionCreation(coloName, NewPortfolio.class));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Successfully Created PR's across all VM's");
@@ -1621,7 +1621,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(coloName, newPortfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Inserted Portfolio data across PR's");
@@ -1707,7 +1707,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
} catch (FunctionException e) {
if (e.getCause() instanceof RegionNotFoundException) {
- LogWriterSupport.getLogWriter().info("Query received FunctionException successfully while using QueryService.");
+ LogWriterUtils.getLogWriter().info("Query received FunctionException successfully while using QueryService.");
} else {
fail("RegionNotFoundException must be thrown here");
}
@@ -1715,7 +1715,7 @@ public class PRColocatedEquiJoinDUnitTest extends PartitionedRegionDUnitTestCase
}
});
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQBasicQueryDUnitTest#testPRBasicQuerying: Querying PR's Test ENDED");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRInvalidQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRInvalidQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRInvalidQueryDUnitTest.java
index adacf1b..aa7f1fe 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRInvalidQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRInvalidQueryDUnitTest.java
@@ -25,7 +25,7 @@ package com.gemstone.gemfire.cache.query.partitioned;
import com.gemstone.gemfire.cache.query.data.PortfolioData;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
public class PRInvalidQueryDUnitTest extends PartitionedRegionDUnitTestCase
@@ -64,7 +64,7 @@ public class PRInvalidQueryDUnitTest extends PartitionedRegionDUnitTestCase
public void testPRDAckCreationAndQueryingWithInvalidQuery() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: Querying PR Test with Expected InvalidQueryException*****");
Host host = Host.getHost(0);
@@ -77,17 +77,17 @@ public class PRInvalidQueryDUnitTest extends PartitionedRegionDUnitTestCase
// Creting PR's on the participating VM's
// Creating Accessor node on the VM
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: Creating the Accessor node in the PR");
vm0.invoke(prq.getCacheSerializableRunnableForPRAccessorCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: Successfully created the Accessor node in the PR");
// Creating the Datastores Nodes in the VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: Creating the Datastore node in the PR");
vm1.invoke(prq.getCacheSerializableRunnableForPRCreate(name,
@@ -96,11 +96,11 @@ public class PRInvalidQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy));
vm3.invoke(prq.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: Successfully Created the Datastore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: Successfully Created PR's across all VM's");
@@ -118,7 +118,7 @@ public class PRInvalidQueryDUnitTest extends PartitionedRegionDUnitTestCase
+ (2 * step), i + (3 * step)));
vm3.invoke(prq.getCacheSerializableRunnableForPRPuts(name, portfolio, i
+ (3 * step), cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: Successfully Inserted Portfolio data across PR's");
@@ -126,7 +126,7 @@ public class PRInvalidQueryDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(prq.getCacheSerializableRunnableForPRInvalidQuery(name,
invalidQuery));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRInvalidQueryDUnitTest#testPRDAckCreationAndQueryingWithInvalidQuery: *****Querying PR's Test with Expected Invalid Query Exception *****");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheCloseDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheCloseDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheCloseDUnitTest.java
index 5a4e65e..6c6c36f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheCloseDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheCloseDUnitTest.java
@@ -37,8 +37,8 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -91,7 +91,7 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
public void testPRWithCacheCloseInOneDatastoreWithDelay() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Querying PR Test with cache Close PR operation*****");
Host host = Host.getHost(0);
@@ -106,33 +106,33 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
// Creting PR's on the participating VM's
// Creting Accessor PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Creating Accessor node on VM0");
accessor.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Successfully Created Accessor node on VM0");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Creating PR's across all VM1 , VM2");
datastore1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
datastore2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Successfully Created PR on VM1 , VM2");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Creating Local Region on VM0");
accessor.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Successfully Created Local Region on VM0");
@@ -142,36 +142,36 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Inserting Portfolio data through the accessor node");
accessor.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Inserting Portfolio data on local node VM0 for result Set Comparison");
accessor.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
Random random = new Random();
AsyncInvocation async0;
// querying the VM for data
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Querying on VM0 both on PR Region & local ,also Comparing the Results sets from both");
async0 = accessor
.invokeAsync(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Calling for cache close on either of the Datastores VM1 , VM2 at random and then recreating the cache, with a predefined Delay ");
for (int j = 0; j < queryTestCycle; j++) {
@@ -180,7 +180,7 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
name, redundancy));
Wait.pause(threadSleepTime);
}
- Threads.join(async0, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 5 * 60 * 1000);
if (async0.exceptionOccurred()) {
// for now, certain exceptions when a region is closed are acceptable
@@ -200,7 +200,7 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithDelay: Querying with PR Operations ENDED*****");
}
@@ -219,7 +219,7 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
*/
public void testPRWithCacheCloseInOneDatastoreWithoutDelay() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Querying PR Test with cache Close PR operation without delay*****");
Host host = Host.getHost(0);
@@ -232,39 +232,39 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
vmList.add(vm2);
// Creting PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Creating Accessor node on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Successfully Created Accessor node on VM0");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Creating PR's across all VM1 , VM2");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Successfully Created PR on VM1 , VM2");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Successfully Created Local Region on VM0");
- LogWriterSupport.getLogWriter().info("Successfully Created PR's across all VM's");
+ LogWriterUtils.getLogWriter().info("Successfully Created PR's across all VM's");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Successfully Created Local Region on VM0");
@@ -273,22 +273,22 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
@@ -296,14 +296,14 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
AsyncInvocation async0;
// querying the VM for data
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Querying on VM0 both on PR Region & local ,also Comparing the Results sets from both");
async0 = vm0
.invokeAsync(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Calling for cache close on either of the Datastores VM1 , VM2 at random and then recreating the cache, with no delay ");
for (int j = 0; j < queryTestCycle; j++) {
@@ -312,7 +312,7 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
name, redundancy));
}
- Threads.join(async0, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 5 * 60 * 1000);
if (async0.exceptionOccurred()) {
// for now, certain exceptions when a region is closed are acceptable
@@ -332,7 +332,7 @@ public class PRQueryCacheCloseDUnitTest extends PartitionedRegionDUnitTestCase
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryCacheCloseDUnitTest#testPRWithCacheCloseInOneDatastoreWithoutDelay: Querying with PR Operations without delay ENDED*****");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheClosedJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheClosedJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheClosedJUnitTest.java
index 025520a..da64daf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheClosedJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryCacheClosedJUnitTest.java
@@ -34,7 +34,7 @@ import com.gemstone.gemfire.cache.query.RegionNotFoundException;
import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.data.PortfolioData;
import com.gemstone.gemfire.internal.cache.PartitionedRegionTestHelper;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -222,8 +222,8 @@ public class PRQueryCacheClosedJUnitTest
logger
.info("PRQueryCacheClosedJUnitTest#testQueryOnSingleDataStoreWithCacheClose: Waiting for the Threads to join ");
- Threads.join(t1, 30 * 1000, null);
- Threads.join(t2, 30 * 1000, null);
+ ThreadUtils.join(t1, 30 * 1000);
+ ThreadUtils.join(t2, 30 * 1000);
logger
.info("PRQueryCacheClosedJUnitTest#testQueryOnSingleDataStoreWithCacheClose: checking for any Unexpected Exception's occured");
[05/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PrCqUsingPoolDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PrCqUsingPoolDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PrCqUsingPoolDUnitTest.java
index 95dc532..273380d 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PrCqUsingPoolDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PrCqUsingPoolDUnitTest.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.cache.server.CacheServer;
import com.gemstone.gemfire.cache30.ClientServerTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -121,7 +121,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
// create client
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
//createClient(client, port, host0);
String poolName = "testCQAndPartitionedRegion";
@@ -236,7 +236,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
// create client
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testPartitionedCqOnAccessorBridgeServer";
createPool(client, poolName, host0, port);
@@ -330,7 +330,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
// creating an accessor vm with Bridge Server installed.
createServer(server1);
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testPartitionedCqOnSingleBridgeServer";
createPool(client, poolName, host0, port);
@@ -430,7 +430,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
// create client
final int port = server2.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server2.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server2.getHost());
String poolName = "testPRCqOnSingleBridgeServerUpdatesOriginatingAtAccessor";
createPool(client, poolName, host0, port);
@@ -528,7 +528,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testPRCqWithInvalidatesOnBridgeServer";
createPool(client, poolName, host0, port);
@@ -628,7 +628,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testPRCqWithInvalidatesOnAccessorBridgeServer";
createPool(client, poolName, host0, port);
@@ -729,7 +729,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName1 = "testPRCqWithUpdatesFromClients1";
createPool(client, poolName1, host0, port);
@@ -838,7 +838,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
Wait.pause(2000);
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName1 = "testPRCqWithMultipleRegionsOnServer1";
createPool(client, poolName1, host0, port);
@@ -984,7 +984,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
Wait.pause(2000);
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName1 = "testPRWithCQsAndProfileUpdates1";
createPool(client, poolName1, host0, port);
@@ -1203,7 +1203,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
createServer(server2);
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName1 = "testEventsDuringQueryExecution";
createPool(client, poolName1, host0, port);
@@ -1337,7 +1337,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
final int size = 100;
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCQsWithPutalls";
createPool(client, poolName, new String[]{host0}, new int[]{port});
@@ -1431,7 +1431,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
final int size = 100;
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCQsWithPutalls";
createPool(client, poolName, new String[]{host0}, new int[]{port});
@@ -1533,7 +1533,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
final int size = 100;
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCQsWithPutallsTx";
createPool(client, poolName, new String[]{host0}, new int[]{port});
@@ -1655,7 +1655,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
final int size = 100;
final int port = server1.invokeInt(PrCqUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCQsWithPutallsTx";
createPool(client, poolName, new String[]{host0}, new int[]{port});
@@ -1808,7 +1808,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
//AttributesFactory factory = new AttributesFactory();
//factory.setScope(Scope.DISTRIBUTED_ACK);
//factory.setMirrorType(MirrorType.KEYS_VALUES);
@@ -1827,7 +1827,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
//assertTrue(getSystem().getDistributionManager().getOtherDistributionManagerIds().size() > 0);
for (int i = 0; i < regions.length; i++) {
Region r = createRegion(regions[i], attr.create());
- LogWriterSupport.getLogWriter().info("Server created the region: "+r);
+ LogWriterUtils.getLogWriter().info("Server created the region: "+r);
}
// pause(2000);
try {
@@ -1885,7 +1885,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
}
for (int i=0; i < servers.length; i++){
- LogWriterSupport.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
+ LogWriterUtils.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
cpf.addServer(servers[i], ports[i]);
}
@@ -1906,8 +1906,8 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
SerializableRunnable createQService =
new CacheSerializableRunnable("Create Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info(
"Will connect to server at por: " + serverPorts[0] + " and at host : "
+ serverHost);
//Region region1 = null;
@@ -1930,7 +1930,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
for (int i=0; i < regions.length; i++) {
Region clientRegion = createRegion(regions[i], regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + clientRegion);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + clientRegion);
//region1.getAttributesMutator().setCacheListener(new CqListener());
}
}
@@ -1946,7 +1946,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
//getLogWriter().info("### DEBUG CREATE CQ START ####");
//pause(20 * 1000);
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1957,7 +1957,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -1967,11 +1967,11 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
try {
CqQuery cq1 = cqService.newCq(cqName, queryStr, cqa);
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
- LogWriterSupport.getLogWriter().info("Created a new CqQuery : "+cq1);
+ LogWriterUtils.getLogWriter().info("Created a new CqQuery : "+cq1);
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
throw err;
}
}
@@ -1991,7 +1991,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.put(KEY+i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -2005,7 +2005,7 @@ public class PrCqUsingPoolDUnitTest extends CacheTestCase {
m.put(KEY+i, new Portfolio(i));
}
region1.putAll(m);
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQDUnitTest.java
index eb81ce2..49a1252 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQDUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.cache.query.cq.dunit.CqQueryTestListener;
import com.gemstone.gemfire.cache.query.dunit.PdxQueryCQTestBase.TestObject;
import com.gemstone.gemfire.cache30.ClientServerTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -88,7 +88,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
final int port0 = vm0.invokeInt(PdxQueryCQTestBase.class, "getCacheServerPort");
final int port1 = vm1.invokeInt(PdxQueryCQTestBase.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
// Create client pool.
final String poolName = "testCqPool";
@@ -99,7 +99,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
// Execute CQ
SerializableRunnable executeCq = new CacheSerializableRunnable("Execute queries") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService qService = null;
try {
@@ -109,7 +109,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -129,7 +129,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("QueryService is :" + qService, err);
+ LogWriterUtils.getLogWriter().info("QueryService is :" + qService, err);
throw err;
}
}
@@ -168,7 +168,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
SerializableRunnable validateCq = new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -269,7 +269,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
final int port0 = vm0.invokeInt(PdxQueryCQTestBase.class, "getCacheServerPort");
final int port1 = vm1.invokeInt(PdxQueryCQTestBase.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
// Create client pool.
final String poolName = "testCqPool";
@@ -318,7 +318,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
}
region.registerInterest(list);
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService qService = null;
try {
@@ -329,7 +329,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
// Create CQ Attributes.
for (int i=0; i < queries.length; i++) {
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = (cqName + i);
cqf.initCqListeners(cqListeners);
@@ -349,7 +349,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("QueryService is :" + qService, err);
+ LogWriterUtils.getLogWriter().info("QueryService is :" + qService, err);
throw err;
}
}
@@ -473,7 +473,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
final int port1 = vm1.invokeInt(PdxQueryCQTestBase.class, "getCacheServerPort");
final int port2 = vm2.invokeInt(PdxQueryCQTestBase.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
// Create client pool.
final String poolName = "testCqPool";
@@ -509,7 +509,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
}
region.registerInterest(list);
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService qService = null;
try {
@@ -520,7 +520,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
// Create CQ Attributes.
for (int i=0; i < queries.length; i++) {
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = (cqName + i);
cqf.initCqListeners(cqListeners);
@@ -540,7 +540,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("QueryService is :" + qService, err);
+ LogWriterUtils.getLogWriter().info("QueryService is :" + qService, err);
throw err;
}
}
@@ -658,7 +658,7 @@ public class PdxQueryCQDUnitTest extends PdxQueryCQTestBase {
final int updateEvents) {
vm.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQTestBase.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQTestBase.java
index 947a4ac..f99a316 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQTestBase.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxQueryCQTestBase.java
@@ -131,7 +131,7 @@ public abstract class PdxQueryCQTestBase extends CacheTestCase {
cpf.setSubscriptionEnabled(subscriptionEnabled);
cpf.setSubscriptionRedundancy(redundancy);
for (int i=0; i < servers.length; i++){
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
cpf.addServer(servers[i], ports[i]);
}
cpf.create(poolName);
@@ -157,14 +157,14 @@ public abstract class PdxQueryCQTestBase extends CacheTestCase {
}
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Executing Query on server:" + queryStr);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Executing Query on server:" + queryStr);
Query query = remoteQueryService.newQuery(queryStr);
rs[0][0] = (SelectResults)query.execute();
//printResults (rs[0][0], " ### Remote Query Results : ####");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Executing Query locally:" + queryStr);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Executing Query locally:" + queryStr);
query = localQueryService.newQuery(queryStr);
rs[0][1] = (SelectResults)query.execute();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Remote Query rs size: " + (rs[0][0]).size() +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Remote Query rs size: " + (rs[0][0]).size() +
"Local Query rs size: " + (rs[0][1]).size());
//printResults (rs[0][1], " ### Local Query Results : ####");
// Compare local and remote query results.
@@ -255,7 +255,7 @@ public abstract class PdxQueryCQTestBase extends CacheTestCase {
for (int i=0; i < queryString.length; i++){
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Executing Query :" + queryString[i]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Executing Query :" + queryString[i]);
Query query = qService.newQuery(queryString[i]);
results = (SelectResults)query.execute(params[i]);
} catch (Exception e) {
@@ -294,12 +294,12 @@ public abstract class PdxQueryCQTestBase extends CacheTestCase {
SerializableRunnable closeCache =
new CacheSerializableRunnable("Close Client") {
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Close Client. ###");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Close Client. ###");
try {
closeCache();
disconnectFromDS();
} catch (Exception ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Failed to get close client. ###");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Failed to get close client. ###");
}
}
};
@@ -334,13 +334,13 @@ public abstract class PdxQueryCQTestBase extends CacheTestCase {
@Override
public boolean equals(Object o){
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("In TestObject2.equals() this: " + this + " other :" + o);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("In TestObject2.equals() this: " + this + " other :" + o);
GemFireCacheImpl.getInstance().getLoggerI18n().fine("In TestObject2.equals() this: " + this + " other :" + o);
TestObject2 other = (TestObject2)o;
if (_id == other._id) {
return true;
} else {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("NOT EQUALS");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("NOT EQUALS");
return false;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUpdateRIDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUpdateRIDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUpdateRIDUnitTest.java
index e1208da..6e4de65 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUpdateRIDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUpdateRIDUnitTest.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.cache.server.CacheServer;
import com.gemstone.gemfire.cache30.ClientServerTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -98,7 +98,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Init values at server.
final int size = 10;
@@ -144,7 +144,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testClientIndexUpdateWithRegisterInterest";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -195,7 +195,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
//Init values at server.
final int size = 10;
@@ -236,7 +236,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
//Init values at server.
final int size = 10;
@@ -283,7 +283,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
//Init values at server.
final int size = 1000;
@@ -340,7 +340,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Init values at server.
final int size = 10;
@@ -381,7 +381,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Init values at server.
final int size = 10;
@@ -422,7 +422,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
final int port = server.invokeInt(QueryIndexUpdateRIDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Init values at server.
final int size = 10;
@@ -469,7 +469,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
} else {
region = getRootRegion().getSubregion(regionName);
}
- region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
} catch (Exception cqe) {
AssertionError err = new AssertionError("Failed to get Region.");
err.initCause(cqe);
@@ -512,7 +512,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
} else {
region = getRootRegion().getSubregion(regionName);
}
- region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
} catch (Exception cqe) {
AssertionError err = new AssertionError("Failed to get Region.");
err.initCause(cqe);
@@ -548,7 +548,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
AttributesFactory factory = new AttributesFactory();
factory.setMirrorType(MirrorType.KEYS_VALUES);
@@ -624,7 +624,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
// getLogWriter().info("### puting '"+KEY+i+"' in region " + region1);
region1.put(KEY+i, new Portfolio((start != 0 ? start : 1) * i, i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -646,7 +646,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
SerializableRunnable createQService =
new CacheSerializableRunnable("Create Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
//Region region1 = null;
// Initialize CQ Service.
try {
@@ -669,7 +669,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
}
createRootRegion(regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Root Region on Client");
+ LogWriterUtils.getLogWriter().info("### Successfully Created Root Region on Client");
}
};
@@ -690,7 +690,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
public void validateQueryOnIndexWithRegion(VM vm, final String query, final int resultSize, final String region) {
vm.invoke(new CacheSerializableRunnable("Validate Query") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating Query. ###");
+ LogWriterUtils.getLogWriter().info("### Validating Query. ###");
QueryService qs = getCache().getQueryService();
Query q = qs.newQuery(query);
@@ -701,7 +701,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
Object r = q.execute();
if(r instanceof SelectResults){
int rSize = ((SelectResults)r).asSet().size();
- LogWriterSupport.getLogWriter().info("### Result Size is :" + rSize);
+ LogWriterUtils.getLogWriter().info("### Result Size is :" + rSize);
if(region == null) {
assertEquals(resultSize, rSize);
@@ -731,7 +731,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
public void asyncClearRegion(VM vm, final String regionName){
vm.invokeAsync(new CacheSerializableRunnable("Destroy entries") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Clearing Region. ###");
+ LogWriterUtils.getLogWriter().info("### Clearing Region. ###");
Region region1;
if(!"root".equals(regionName)){
region1 = getRootRegion().getSubregion(regionName);
@@ -739,7 +739,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
region1 = getRootRegion();
}
region1.clear();
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -747,7 +747,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
private SerializableRunnable getSRClearRegion(final String regionName) {
SerializableRunnable sr = new CacheSerializableRunnable("Destroy entries") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Clearing Region. ###");
+ LogWriterUtils.getLogWriter().info("### Clearing Region. ###");
Region region1;
if(!"root".equals(regionName)){
region1 = getRootRegion().getSubregion(regionName);
@@ -755,7 +755,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
region1 = getRootRegion();
}
region1.clear();
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
};
return sr;
@@ -774,7 +774,7 @@ public class QueryIndexUpdateRIDUnitTest extends CacheTestCase{
} else {
region = getRootRegion().getSubregion(regionName);
}
- region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
} catch (Exception cqe) {
AssertionError err = new AssertionError("Failed to get Region.");
err.initCause(cqe);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryMonitorDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryMonitorDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryMonitorDUnitTest.java
index 68f0bfa..febe78e 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryMonitorDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryMonitorDUnitTest.java
@@ -50,12 +50,12 @@ import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -160,7 +160,7 @@ public class QueryMonitorDUnitTest extends CacheTestCase {
r = new SerializableRunnable("getClientSystem") {
public void run() {
- Properties props = DistributedTestSupport.getAllDistributedSystemProperties(new Properties());
+ Properties props = DistributedTestUtils.getAllDistributedSystemProperties(new Properties());
props.put(DistributionConfigImpl.LOCATORS_NAME, "");
getSystem(props);
}
@@ -264,7 +264,7 @@ public class QueryMonitorDUnitTest extends CacheTestCase {
for (int i=0; i < server.length; i++){
port[i] = server[i].invokeInt(QueryMonitorDUnitTest.class, "getCacheServerPort");
}
- final String host0 = NetworkSupport.getServerHostName(server[0].getHost());
+ final String host0 = NetworkUtils.getServerHostName(server[0].getHost());
SerializableRunnable initClient = new CacheSerializableRunnable("Init client") {
public void run2() throws CacheException {
@@ -1020,7 +1020,7 @@ public class QueryMonitorDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server, 0, true);
final int port = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
cqDUnitTest.createClient(client, port, host0);
@@ -1137,7 +1137,7 @@ public class QueryMonitorDUnitTest extends CacheTestCase {
exampleRegion.put(""+i, new Portfolio(i));
}
}
- LogWriterSupport.getLogWriter().info("### Completed updates in server1 in testCacheOpAfterQueryCancel");
+ LogWriterUtils.getLogWriter().info("### Completed updates in server1 in testCacheOpAfterQueryCancel");
}
});
@@ -1150,7 +1150,7 @@ public class QueryMonitorDUnitTest extends CacheTestCase {
exampleRegion.put(""+i, new Portfolio(i));
}
}
- LogWriterSupport.getLogWriter().info("### Completed updates in server2 in testCacheOpAfterQueryCancel");
+ LogWriterUtils.getLogWriter().info("### Completed updates in server2 in testCacheOpAfterQueryCancel");
}
});
@@ -1171,17 +1171,17 @@ public class QueryMonitorDUnitTest extends CacheTestCase {
Query query = queryService.newQuery(qStr);
query.execute();
} catch (QueryExecutionTimeoutException qet) {
- LogWriterSupport.getLogWriter().info("### Got Expected QueryExecutionTimeout exception. " +
+ LogWriterUtils.getLogWriter().info("### Got Expected QueryExecutionTimeout exception. " +
qet.getMessage());
if (qet.getMessage().contains("cancelled after exceeding max execution")){
- LogWriterSupport.getLogWriter().info("### Doing a put operation");
+ LogWriterUtils.getLogWriter().info("### Doing a put operation");
exampleRegion.put(""+i, new Portfolio(i));
}
} catch (Exception e){
fail("Exception executing query." + e.getMessage());
}
}
- LogWriterSupport.getLogWriter().info("### Completed Executing queries in testCacheOpAfterQueryCancel");
+ LogWriterUtils.getLogWriter().info("### Completed Executing queries in testCacheOpAfterQueryCancel");
} catch (Exception ex){
Assert.fail("Exception creating the query service", ex);
}
@@ -1191,23 +1191,23 @@ public class QueryMonitorDUnitTest extends CacheTestCase {
AsyncInvocation ai3 = server3.invokeAsync(executeQuery);
AsyncInvocation ai4 = server4.invokeAsync(executeQuery);
- LogWriterSupport.getLogWriter().info("### Waiting for async threads to join in testCacheOpAfterQueryCancel");
+ LogWriterUtils.getLogWriter().info("### Waiting for async threads to join in testCacheOpAfterQueryCancel");
try {
- Threads.join(ai1, 5 * 60 * 1000, null);
- Threads.join(ai2, 5 * 60 * 1000, null);
- Threads.join(ai3, 5 * 60 * 1000, null);
- Threads.join(ai4, 5 * 60 * 1000, null);
+ ThreadUtils.join(ai1, 5 * 60 * 1000);
+ ThreadUtils.join(ai2, 5 * 60 * 1000);
+ ThreadUtils.join(ai3, 5 * 60 * 1000);
+ ThreadUtils.join(ai4, 5 * 60 * 1000);
} catch (Exception ex) {
fail("Async thread join failure");
}
- LogWriterSupport.getLogWriter().info("### DONE Waiting for async threads to join in testCacheOpAfterQueryCancel");
+ LogWriterUtils.getLogWriter().info("### DONE Waiting for async threads to join in testCacheOpAfterQueryCancel");
validateQueryMonitorThreadCnt(server1, 0, 1000);
validateQueryMonitorThreadCnt(server2, 0, 1000);
validateQueryMonitorThreadCnt(server3, 0, 1000);
validateQueryMonitorThreadCnt(server4, 0, 1000);
- LogWriterSupport.getLogWriter().info("### DONE validating query monitor threads testCacheOpAfterQueryCancel");
+ LogWriterUtils.getLogWriter().info("### DONE validating query monitor threads testCacheOpAfterQueryCancel");
stopServer(server1);
stopServer(server2);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/snapshot/ClientSnapshotDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/snapshot/ClientSnapshotDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/snapshot/ClientSnapshotDUnitTest.java
index 19c0ae7..540b37b 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/snapshot/ClientSnapshotDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/snapshot/ClientSnapshotDUnitTest.java
@@ -41,8 +41,8 @@ import com.gemstone.gemfire.cache.util.CqListenerAdapter;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
public class ClientSnapshotDUnitTest extends CacheTestCase {
@@ -255,9 +255,9 @@ public class ClientSnapshotDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory()
- .set("log-level", LogWriterSupport.getDUnitLogLevel())
+ .set("log-level", LogWriterUtils.getDUnitLogLevel())
.setPdxSerializer(new MyPdxSerializer())
- .addPoolServer(NetworkSupport.getServerHostName(host), port)
+ .addPoolServer(NetworkUtils.getServerHostName(host), port)
.setPoolSubscriptionEnabled(true)
.setPoolPRSingleHopEnabled(false);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PRDeltaPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PRDeltaPropagationDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PRDeltaPropagationDUnitTest.java
index 7327e94..7dcd120 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PRDeltaPropagationDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PRDeltaPropagationDUnitTest.java
@@ -59,7 +59,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -824,7 +824,7 @@ public class PRDeltaPropagationDUnitTest extends DistributedTestCase {
assertNotNull(cache);
deltaPR = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(deltaPR);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + deltaPR);
}
[07/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfDUnitTest.java
index 7ba2f68..a9747f2 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfDUnitTest.java
@@ -38,8 +38,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -87,7 +87,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
cqDUnitTest.createClient(client, port, host0);
@@ -95,7 +95,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
client.invoke(new CacheSerializableRunnable("Create CQ :" + cqName) {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -106,7 +106,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqTimeTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqTimeTestListener(LogWriterUtils.getLogWriter())};
((CqTimeTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -118,7 +118,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
cq1.execute();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
@@ -138,7 +138,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
client.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -192,7 +192,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
cqDUnitTest.createClient(client, port, host0);
@@ -218,7 +218,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -249,7 +249,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
Collection<? extends InternalCqQuery> cqs = cqService.getAllCqs();
@@ -280,7 +280,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -312,7 +312,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
Collection<? extends InternalCqQuery> cqs = cqService.getAllCqs();
@@ -342,7 +342,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -374,7 +374,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
Collection<? extends InternalCqQuery> cqs = cqService.getAllCqs();
@@ -412,7 +412,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
cqDUnitTest.createClient(client, port, host0);
// Create and Execute same kind of CQs.
@@ -527,7 +527,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
for (int clientIndex=0; clientIndex < 3; clientIndex++){
cqDUnitTest.createClient(clients[clientIndex], port, host0);
@@ -686,7 +686,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
VM clients[] = new VM[]{client1, client2};
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
// Create client.
// Create client with redundancyLevel -1
@@ -849,7 +849,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
VM clients[] = new VM[]{client1, client2};
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
cqDUnitTest.createLocalRegion(client1, new int[] {port1, ports[0]}, host0, "-1", cqDUnitTest.regions);
@@ -898,7 +898,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
// Create client.
// final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
@@ -973,7 +973,7 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = (CqServiceImpl) ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -999,12 +999,12 @@ public class CqPerfDUnitTest extends CacheTestCase {
try {
cqService = (CqServiceImpl) ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
long timeTaken = cqService.getCqServiceVsdStats().getCqQueryExecutionTime();
- LogWriterSupport.getLogWriter().info("Total Time taken to Execute CQ Query :" + timeTaken);
+ LogWriterUtils.getLogWriter().info("Total Time taken to Execute CQ Query :" + timeTaken);
//System.out.println("Total Time taken to Execute CQ Query :" + timeTaken);
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfUsingPoolDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfUsingPoolDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfUsingPoolDUnitTest.java
index eb21a64..3c7e3b5 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfUsingPoolDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqPerfUsingPoolDUnitTest.java
@@ -38,8 +38,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -87,7 +87,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
cqDUnitTest.createClient(client, port, host0);
@@ -95,7 +95,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
client.invoke(new CacheSerializableRunnable("Create CQ :" + cqName) {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -106,7 +106,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqTimeTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqTimeTestListener(LogWriterUtils.getLogWriter())};
((CqTimeTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -118,7 +118,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
cq1.execute();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
@@ -138,7 +138,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
client.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -192,7 +192,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// cqDUnitTest.createClient(client, port, host0);
String poolName = "testKeyMaintainance";
@@ -220,7 +220,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -252,7 +252,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
Collection<? extends InternalCqQuery> cqs = cqService.getAllCqs();
@@ -283,7 +283,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -316,7 +316,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
Collection<? extends InternalCqQuery> cqs = cqService.getAllCqs();
@@ -345,7 +345,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -376,7 +376,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
Collection<? extends InternalCqQuery> cqs = cqService.getAllCqs();
@@ -412,7 +412,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
//cqDUnitTest.createClient(client, port, host0);
String poolName = "testMatchingCqs";
@@ -530,7 +530,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testMatchingCQWithMultipleClients";
for (int clientIndex=0; clientIndex < 3; clientIndex++){
String cPoolName = "testMatchingCQWithMultipleClients" + clientIndex;
@@ -693,7 +693,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
VM clients[] = new VM[]{client1, client2};
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
// Create client.
// Create client with redundancyLevel -1
@@ -858,7 +858,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
// Create client.
// final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
@@ -933,7 +933,7 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = (CqServiceImpl) ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -959,12 +959,12 @@ public class CqPerfUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = (CqServiceImpl) ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
long timeTaken = cqService.getCqServiceVsdStats().getCqQueryExecutionTime();
- LogWriterSupport.getLogWriter().info("Total Time taken to Execute CQ Query :" + timeTaken);
+ LogWriterUtils.getLogWriter().info("Total Time taken to Execute CQ Query :" + timeTaken);
System.out.println("Total Time taken to Execute CQ Query :" + timeTaken);
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryDUnitTest.java
index f811427..24187a0 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryDUnitTest.java
@@ -67,8 +67,8 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -213,7 +213,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setMirrorType(mirrorType);
@@ -251,7 +251,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
try {
startBridgeServer(thePort, true);
}
@@ -300,7 +300,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
public void closeServer(VM server) {
server.invoke(new SerializableRunnable("Close CacheServer") {
public void run() {
- LogWriterSupport.getLogWriter().info("### Close CacheServer. ###");
+ LogWriterUtils.getLogWriter().info("### Close CacheServer. ###");
stopBridgeServer(getCache());
}
});
@@ -311,7 +311,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
server.invoke(new SerializableRunnable("Crash CacheServer") {
public void run() {
com.gemstone.gemfire.cache.client.internal.ConnectionImpl.setTEST_DURABLE_CLIENT_CRASH(true);
- LogWriterSupport.getLogWriter().info("### Crashing CacheServer. ###");
+ LogWriterUtils.getLogWriter().info("### Crashing CacheServer. ###");
stopBridgeServer(getCache());
}
});
@@ -322,7 +322,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
server.invoke(new SerializableRunnable("Close CacheServer") {
public void run() {
com.gemstone.gemfire.cache.client.internal.ConnectionImpl.setTEST_DURABLE_CLIENT_CRASH(false);
- LogWriterSupport.getLogWriter().info("### Crashing CacheServer. ###");
+ LogWriterUtils.getLogWriter().info("### Crashing CacheServer. ###");
stopBridgeServer(getCache());
}
});
@@ -340,7 +340,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
SerializableRunnable createQService =
new CacheSerializableRunnable("Create Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
//Region region1 = null;
// Initialize CQ Service.
try {
@@ -359,7 +359,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
}
for (int i=0; i < regions.length; i++) {
createRegion(regions[i], regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
//region1.getAttributesMutator().setCacheListener(new CqListener());
}
}
@@ -373,7 +373,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
SerializableRunnable createQService =
new CacheSerializableRunnable("Create Local Region") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Local Region. ###");
+ LogWriterUtils.getLogWriter().info("### Create Local Region. ###");
AttributesFactory af = new AttributesFactory();
af.setScope(Scope.LOCAL);
@@ -386,7 +386,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
RegionFactory rf = getCache().createRegionFactory(af.create());
for (int i = 0; i < regionNames.length; i++) {
rf.create(regionNames[i]);
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
}
}
};
@@ -398,7 +398,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
SerializableRunnable createQService =
new CacheSerializableRunnable("Create Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
//Region region1 = null;
// Initialize CQ Service.
try {
@@ -421,8 +421,8 @@ public class CqQueryDUnitTest extends CacheTestCase {
}
createRegion(regions[0], regionFactory0.createRegionAttributes());
createRegion(regions[1], regionFactory1.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[0]);
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[1]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[0]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[1]);
}
};
@@ -436,11 +436,11 @@ public class CqQueryDUnitTest extends CacheTestCase {
SerializableRunnable closeCQService =
new CacheSerializableRunnable("Close Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close Client. ###");
+ LogWriterUtils.getLogWriter().info("### Close Client. ###");
try {
((DefaultQueryService)getCache().getQueryService()).closeCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("### Failed to get CqService during ClientClose() ###");
+ LogWriterUtils.getLogWriter().info("### Failed to get CqService during ClientClose() ###");
}
}
@@ -459,7 +459,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.put(KEY+i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -474,7 +474,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
portfolio.createTime = System.currentTimeMillis();
region1.put(KEY+i, portfolio);
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -488,7 +488,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
portfolio.shortID = new Short(""+i);
region1.put(KEY+i, portfolio);
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -520,7 +520,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
}
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -552,7 +552,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
}
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -564,7 +564,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.put("key" + i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -592,7 +592,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.destroy(KEY+i);
}
- LogWriterSupport.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
}
});
@@ -608,7 +608,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.invalidate(KEY+i);
}
- LogWriterSupport.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
}
});
@@ -626,7 +626,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
//getLogWriter().info("### DEBUG CREATE CQ START ####");
//pause(20 * 1000);
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -637,7 +637,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
// ((CqQueryTestListener)cqListeners[0]).cqName = cqName;
// if (isBridgeMemberTest) {
// testListenerForBridgeMembershipTest = (CqQueryTestListener)cqListeners[0];
@@ -653,7 +653,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
throw err;
}
}
@@ -665,10 +665,10 @@ public class CqQueryDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Create CQ with no name:" ) {
public void run2() throws CacheException {
//pause(60 * 1000);
- LogWriterSupport.getLogWriter().info("### DEBUG CREATE CQ START ####");
+ LogWriterUtils.getLogWriter().info("### DEBUG CREATE CQ START ####");
//pause(20 * 1000);
- LogWriterSupport.getLogWriter().info("### Create CQ with no name. ###");
+ LogWriterUtils.getLogWriter().info("### Create CQ with no name. ###");
// Get CQ Service.
QueryService cqService = null;
CqQuery cq1 = null;
@@ -684,7 +684,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
for (int i = 0; i < 20; ++i) {
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
cqf.initCqListeners(cqListeners);
CqAttributes cqa = cqf.create();
@@ -694,40 +694,40 @@ public class CqQueryDUnitTest extends CacheTestCase {
cq1 = cqService.newCq(queryStr, cqa);
((CqQueryTestListener)cqListeners[0]).cqName = cq1.getName();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CQService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CQService is :" + cqService);
ex.printStackTrace();
fail("Failed to create CQ with no name" + " . " + ex.getMessage());
}
if (cq1 == null) {
- LogWriterSupport.getLogWriter().info("Failed to get CqQuery object for CQ with no name.");
+ LogWriterUtils.getLogWriter().info("Failed to get CqQuery object for CQ with no name.");
}
else {
cqName = cq1.getName();
- LogWriterSupport.getLogWriter().info("Created CQ with no name, generated CQ name: " + cqName + " CQ state:" + cq1.getState());
+ LogWriterUtils.getLogWriter().info("Created CQ with no name, generated CQ name: " + cqName + " CQ state:" + cq1.getState());
assertTrue("Create CQ with no name illegal state", cq1.getState().isStopped());
}
if ( i%2 == 0) {
try {
cqResults = cq1.executeWithInitialResults();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
fail("Failed to execute CQ with initial results, cq name: " + cqName + " . " + ex.getMessage());
}
- LogWriterSupport.getLogWriter().info("initial result size = " + cqResults.size());
- LogWriterSupport.getLogWriter().info("CQ state after execute with initial results = " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("initial result size = " + cqResults.size());
+ LogWriterUtils.getLogWriter().info("CQ state after execute with initial results = " + cq1.getState());
assertTrue("executeWithInitialResults() state mismatch", cq1.getState().isRunning());
}
else {
try {
cq1.execute();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CQService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CQService is :" + cqService);
ex.printStackTrace();
fail("Failed to execute CQ " + cqName + " . " + ex.getMessage());
}
- LogWriterSupport.getLogWriter().info("CQ state after execute = " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("CQ state after execute = " + cq1.getState());
assertTrue("execute() state mismatch", cq1.getState().isRunning());
}
@@ -735,7 +735,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
try {
cq1.close();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, ex);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, ex);
fail("Failed to close CQ " + cqName + " . " + ex.getMessage());
}
assertTrue("closeCq() state mismatch", cq1.getState().isClosed());
@@ -763,7 +763,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
private void work() throws CacheException {
//pause(60 * 1000);
- LogWriterSupport.getLogWriter().info("### DEBUG EXECUTE CQ START ####");
+ LogWriterUtils.getLogWriter().info("### DEBUG EXECUTE CQ START ####");
//pause(20 * 1000);
// Get CQ Service.
@@ -783,16 +783,16 @@ public class CqQueryDUnitTest extends CacheTestCase {
try {
cq1 = cqService.getCq(cqName);
if (cq1 == null) {
- LogWriterSupport.getLogWriter().info("Failed to get CqQuery object for CQ name: " + cqName);
+ LogWriterUtils.getLogWriter().info("Failed to get CqQuery object for CQ name: " + cqName);
fail("Failed to get CQ " + cqName);
}
else {
- LogWriterSupport.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
+ LogWriterUtils.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
}
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
- LogWriterSupport.getLogWriter().error(ex);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().error(ex);
AssertionError err = new AssertionError("Failed to execute CQ " + cqName);
err.initCause(ex);
throw err;
@@ -804,13 +804,13 @@ public class CqQueryDUnitTest extends CacheTestCase {
try {
cqResults = cq1.executeWithInitialResults();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ " + cqName);
err.initCause(ex);
throw err;
}
- LogWriterSupport.getLogWriter().info("initial result size = " + cqResults.size());
+ LogWriterUtils.getLogWriter().info("initial result size = " + cqResults.size());
assertTrue("executeWithInitialResults() state mismatch", cq1.getState().isRunning());
if (expectedResultsSize >= 0) {
assertEquals("unexpected results size", expectedResultsSize, cqResults.size());
@@ -823,7 +823,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
AssertionError err = new AssertionError("Failed to execute CQ " + cqName);
err.initCause(ex);
if (expectedErr == null) {
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
}
throw err;
}
@@ -853,7 +853,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
public void stopCQ(VM vm, final String cqName) throws Exception {
vm.invoke(new CacheSerializableRunnable("Stop CQ :" + cqName) {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Stop CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Stop CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -883,7 +883,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Stop CQ :" + cqName) {
public void run2() throws CacheException {
CqQuery cq1 = null;
- LogWriterSupport.getLogWriter().info("### Stop and Exec CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Stop and Exec CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -910,8 +910,8 @@ public class CqQueryDUnitTest extends CacheTestCase {
fail("Count = " + i + "Failed to stop CQ " + cqName + " . " + ex.getMessage());
}
assertTrue("Stop CQ state mismatch, count = " + i, cq1.getState().isStopped());
- LogWriterSupport.getLogWriter().info("After stop in Stop and Execute loop, ran successfully, loop count: " + i);
- LogWriterSupport.getLogWriter().info("CQ state: " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("After stop in Stop and Execute loop, ran successfully, loop count: " + i);
+ LogWriterUtils.getLogWriter().info("CQ state: " + cq1.getState());
// Re-execute CQ
try {
@@ -921,8 +921,8 @@ public class CqQueryDUnitTest extends CacheTestCase {
fail("Count = " + i + "Failed to execute CQ " + cqName + " . " + ex.getMessage());
}
assertTrue("Execute CQ state mismatch, count = " + i, cq1.getState().isRunning());
- LogWriterSupport.getLogWriter().info("After execute in Stop and Execute loop, ran successfully, loop count: " + i);
- LogWriterSupport.getLogWriter().info("CQ state: " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("After execute in Stop and Execute loop, ran successfully, loop count: " + i);
+ LogWriterUtils.getLogWriter().info("CQ state: " + cq1.getState());
}
}
});
@@ -933,7 +933,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
public void closeCQ(VM vm, final String cqName) throws Exception {
vm.invoke(new CacheSerializableRunnable("Close CQ :" + cqName) {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Close CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -966,7 +966,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
Region region = null;
try {
region = getRootRegion().getSubregion(regionName);
- region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
} catch (Exception cqe) {
AssertionError err = new AssertionError("Failed to get Region.");
err.initCause(cqe);
@@ -993,7 +993,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
public void executeAndCloseAndExecuteIRMultipleTimes(VM vm, final String cqName, final String queryStr) {
vm.invoke(new CacheSerializableRunnable("Create CQ :" + cqName) {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1004,7 +1004,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
cqf.initCqListeners(cqListeners);
CqAttributes cqa = cqf.create();
@@ -1017,7 +1017,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
throw err;
}
@@ -1082,7 +1082,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
private void failIfCQExists(VM vm, final String cqName) {
vm.invoke(new CacheSerializableRunnable("Fail if CQ exists") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Fail if CQ Exists. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Fail if CQ Exists. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1105,7 +1105,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1155,7 +1155,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
final int totalEvents) {
vm.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1459,7 +1459,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
private void validateQuery(VM vm, final String query, final int resultSize) {
vm.invoke(new CacheSerializableRunnable("Validate Query") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating Query. ###");
+ LogWriterUtils.getLogWriter().info("### Validating Query. ###");
QueryService qs = getCache().getQueryService();
Query q = qs.newQuery(query);
@@ -1467,7 +1467,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
Object r = q.execute();
if(r instanceof Collection){
int rSize = ((Collection)r).size();
- LogWriterSupport.getLogWriter().info("### Result Size is :" + rSize);
+ LogWriterUtils.getLogWriter().info("### Result Size is :" + rSize);
assertEquals(rSize, rSize);
}
}
@@ -1518,7 +1518,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Stop CQ :" + cqName) {
public void run2() throws CacheException {
CqQuery cq1 = null;
- LogWriterSupport.getLogWriter().info("### CQ attributes mutator for ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### CQ attributes mutator for ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1619,7 +1619,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server2);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -1662,7 +1662,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
createClient(client, thePort, host0);
@@ -1832,7 +1832,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
createClient(client, thePort, host0);
/* Create CQs. */
@@ -1915,7 +1915,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
createClient(client, thePort, host0);
/* Create CQs. */
@@ -2011,7 +2011,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
createClient(client, thePort, host0);
/* debug */
@@ -2102,12 +2102,12 @@ public class CqQueryDUnitTest extends CacheTestCase {
} catch (com.gemstone.gemfire.test.dunit.RMIException rmiExc) {
Throwable cause = rmiExc.getCause();
if (!(cause instanceof AssertionError)) {
- LogWriterSupport.getLogWriter().severe("Expected to see an AssertionError.", cause);
+ LogWriterUtils.getLogWriter().severe("Expected to see an AssertionError.", cause);
fail("wrong error");
}
Throwable causeCause = cause.getCause(); // should be a RegionNotFoundException
if (!(causeCause instanceof RegionNotFoundException)) {
- LogWriterSupport.getLogWriter().severe("Expected cause to be RegionNotFoundException", cause);
+ LogWriterUtils.getLogWriter().severe("Expected cause to be RegionNotFoundException", cause);
fail("wrong cause");
}
}
@@ -2123,14 +2123,14 @@ public class CqQueryDUnitTest extends CacheTestCase {
client.invoke(new CacheSerializableRunnable("CloseAll CQ :") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close All CQ. ###");
+ LogWriterUtils.getLogWriter().info("### Close All CQ. ###");
// Get CQ Service.
QueryService cqService = null;
try {
cqService = getCache().getQueryService();
} catch (Exception cqe) {
cqe.printStackTrace();
- LogWriterSupport.getLogWriter().info("Failed to getCQService.", cqe);
+ LogWriterUtils.getLogWriter().info("Failed to getCQService.", cqe);
fail("Failed to getCQService.");
}
@@ -2139,7 +2139,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
cqService.closeCqs();
} catch (Exception ex){
ex.printStackTrace();
- LogWriterSupport.getLogWriter().info("Failed to close All CQ.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to close All CQ.", ex);
fail("Failed to close All CQ. " + ex.getMessage());
}
}
@@ -2159,7 +2159,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
// Call close all CQ.
client.invoke(new CacheSerializableRunnable("CloseAll CQ 2 :") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close All CQ 2. ###");
+ LogWriterUtils.getLogWriter().info("### Close All CQ 2. ###");
// Get CQ Service.
QueryService cqService = null;
try {
@@ -2197,7 +2197,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
createClient(client, thePort, host0);
@@ -2281,7 +2281,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
/* Create Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
createClient(client1, thePort, host0);
createClient(client2, thePort, host0);
@@ -2431,7 +2431,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -2502,7 +2502,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -2605,7 +2605,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
final int thePort = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -2670,7 +2670,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
value.put("field2", "key" + i);
exampleRegion.put(KEY + i, value);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"### Number of Entries in Region :" + exampleRegion.keys().size());
}
});
@@ -2689,7 +2689,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -2831,7 +2831,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -2873,7 +2873,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
// Create client.
// Properties props = new Properties();
// Create client with redundancyLevel -1
@@ -2961,7 +2961,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(2);
@@ -3059,7 +3059,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server2);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int thePort2 = server2.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
@@ -3119,7 +3119,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -3188,7 +3188,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createLocalRegion(client, new int[] {thePort}, host0, "-1", new String[]{regions[0]});
@@ -3224,7 +3224,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
final int thePort = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -3275,7 +3275,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
// Create region with Global scope
AttributesFactory factory1 = new AttributesFactory();
@@ -3309,7 +3309,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
final int port1 = server1.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int thePort2 = server2.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
@@ -3376,7 +3376,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -3408,7 +3408,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -3460,7 +3460,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -3512,7 +3512,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
final int port1 = server1.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
createClient(client, new int[] { port1, ports[0] }, host0, "-1");
@@ -3560,7 +3560,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
createClient(client, new int[] { port1, ports[0] }, host0, "-1");
@@ -3607,7 +3607,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
@@ -3669,7 +3669,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
createClient(client, new int[] { port1, ports[0] }, host0, "-1");
@@ -3718,7 +3718,7 @@ public class CqQueryDUnitTest extends CacheTestCase {
final int port1 = server1.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
createServer(server2, ports[0]);
@@ -3771,7 +3771,7 @@ public void testCqCloseAndExecuteWithInitialResults() throws Exception {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -3800,7 +3800,7 @@ public void testCQEventsWithNotEqualsUndefined() throws Exception {
createServer(server);
final int thePort = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -3899,14 +3899,14 @@ public void testCQEventsWithNotEqualsUndefined() throws Exception {
server.invoke(new CacheSerializableRunnable("Server Region Entries") {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("### Entries in Server :" + region.keys().size());
+ LogWriterUtils.getLogWriter().info("### Entries in Server :" + region.keys().size());
}
});
client.invoke(new CacheSerializableRunnable("Client Region Entries") {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("### Entries in Client :" + region.keys().size());
+ LogWriterUtils.getLogWriter().info("### Entries in Client :" + region.keys().size());
}
});
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryOptimizedExecuteDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryOptimizedExecuteDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryOptimizedExecuteDUnitTest.java
index 32b9f94..6be233a 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryOptimizedExecuteDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryOptimizedExecuteDUnitTest.java
@@ -26,8 +26,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -74,7 +74,7 @@ public class CqQueryOptimizedExecuteDUnitTest extends CqQueryDUnitTest{
final int thePort = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -101,7 +101,7 @@ public class CqQueryOptimizedExecuteDUnitTest extends CqQueryDUnitTest{
for (int i = numOfEntries+1; i <= numOfEntries*2; i++) {
region1.put(KEY+i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
@@ -173,7 +173,7 @@ public class CqQueryOptimizedExecuteDUnitTest extends CqQueryDUnitTest{
final int thePort = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -201,7 +201,7 @@ public class CqQueryOptimizedExecuteDUnitTest extends CqQueryDUnitTest{
for (int i = numOfEntries+1; i <= numOfEntries*2; i++) {
region1.put(KEY+i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
@@ -268,7 +268,7 @@ public class CqQueryOptimizedExecuteDUnitTest extends CqQueryDUnitTest{
final int port1 = server1.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
createServer(server2, ports[0]);
[25/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkRemoteVMDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkRemoteVMDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkRemoteVMDUnitTest.java
index cec5032..6e6b6c8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkRemoteVMDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkRemoteVMDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.cache.util.CacheWriterAdapter;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -84,7 +84,7 @@ public class PutAllCallBkRemoteVMDUnitTest extends DistributedTestCase {
VM vm1 = host.getVM(1);
vm0.invoke(PutAllCallBkRemoteVMDUnitTest.class, "createCacheForVM0");
vm1.invoke(PutAllCallBkRemoteVMDUnitTest.class, "createCacheForVM1");
- LogWriterSupport.getLogWriter().info("Cache created successfully");
+ LogWriterUtils.getLogWriter().info("Cache created successfully");
}
public void preTearDown(){
@@ -170,7 +170,7 @@ public class PutAllCallBkRemoteVMDUnitTest extends DistributedTestCase {
}catch (Exception ex){
throw new RuntimeException("exception putting entries", ex);
}
- LogWriterSupport.getLogWriter().info("****************paperRegion.get(afterCreate)***************"+paperRegion.get("afterCreate"));
+ LogWriterUtils.getLogWriter().info("****************paperRegion.get(afterCreate)***************"+paperRegion.get("afterCreate"));
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
@@ -340,9 +340,9 @@ public class PutAllCallBkRemoteVMDUnitTest extends DistributedTestCase {
if(counter==null) counter = new Integer(1);
paperRegion.put("afterCreate",new Integer(counter.intValue()+1));
- LogWriterSupport.getLogWriter().info("In afterCreate"+putAllcounter);
+ LogWriterUtils.getLogWriter().info("In afterCreate"+putAllcounter);
if(putAllcounter == forCreate){
- LogWriterSupport.getLogWriter().info("performingtrue");
+ LogWriterUtils.getLogWriter().info("performingtrue");
afterCreate = true;
}
try{
@@ -353,7 +353,7 @@ public class PutAllCallBkRemoteVMDUnitTest extends DistributedTestCase {
}
notified = true;
- LogWriterSupport.getLogWriter().info("*******afterCreate***** Key :"+event.getKey()+ " Value :"+event.getNewValue());
+ LogWriterUtils.getLogWriter().info("*******afterCreate***** Key :"+event.getKey()+ " Value :"+event.getNewValue());
}
public void afterUpdate(EntryEvent event){
@@ -361,9 +361,9 @@ public class PutAllCallBkRemoteVMDUnitTest extends DistributedTestCase {
Integer counter = (Integer)paperRegion.get("afterUpdate");
if(counter==null) counter = new Integer(1);
paperRegion.put("afterUpdate",new Integer(counter.intValue()+1));
- LogWriterSupport.getLogWriter().info("In afterUpdate"+afterUpdateputAllcounter);
+ LogWriterUtils.getLogWriter().info("In afterUpdate"+afterUpdateputAllcounter);
if(afterUpdateputAllcounter == forUpdate){
- LogWriterSupport.getLogWriter().info("performingtrue afterUpdate");
+ LogWriterUtils.getLogWriter().info("performingtrue afterUpdate");
afterUpdate = true;
}
try{
@@ -376,7 +376,7 @@ public class PutAllCallBkRemoteVMDUnitTest extends DistributedTestCase {
notified = true;
- LogWriterSupport.getLogWriter().info("*******afterUpdate***** Key :"+event.getKey()+ " Value :"+event.getNewValue());
+ LogWriterUtils.getLogWriter().info("*******afterUpdate***** Key :"+event.getKey()+ " Value :"+event.getNewValue());
}
}
@@ -386,15 +386,15 @@ public class PutAllCallBkRemoteVMDUnitTest extends DistributedTestCase {
Integer counter = (Integer)paperRegion.get("beforeCreate");
if(counter==null) counter = new Integer(1);
paperRegion.put("beforeCreate",new Integer(counter.intValue()+1));
- LogWriterSupport.getLogWriter().info("*******BeforeCreate***** event="+event);
+ LogWriterUtils.getLogWriter().info("*******BeforeCreate***** event="+event);
}
public void beforeUpdate(EntryEvent event) {
Integer counter = (Integer)paperRegion.get("beforeUpdate");
if(counter==null) counter = new Integer(1);
paperRegion.put("beforeUpdate",new Integer(counter.intValue()+1));
- LogWriterSupport.getLogWriter().info("In beforeUpdate"+beforeUpdateputAllcounter);
- LogWriterSupport.getLogWriter().info("*******BeforeUpdate***** event="+event);
+ LogWriterUtils.getLogWriter().info("In beforeUpdate"+beforeUpdateputAllcounter);
+ LogWriterUtils.getLogWriter().info("*******BeforeUpdate***** event="+event);
}
}
}// end of test class
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkSingleVMDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkSingleVMDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkSingleVMDUnitTest.java
index f7e9fc3..82f3477 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkSingleVMDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PutAllCallBkSingleVMDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
public class PutAllCallBkSingleVMDUnitTest extends DistributedTestCase{
@@ -74,7 +74,7 @@ public class PutAllCallBkSingleVMDUnitTest extends DistributedTestCase{
VM vm1 = host.getVM(1);
vm0.invoke(PutAllCallBkSingleVMDUnitTest.class, "createCache");
vm1.invoke(PutAllCallBkSingleVMDUnitTest.class, "createCache");
- LogWriterSupport.getLogWriter().fine("Cache created in successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created in successfully");
}
public void preTearDown(){
@@ -267,12 +267,12 @@ public class PutAllCallBkSingleVMDUnitTest extends DistributedTestCase{
static class AfterCreateCallback extends CacheListenerAdapter {
public void afterCreate(EntryEvent event){
putAllcounter++;
- LogWriterSupport.getLogWriter().fine("In afterCreate"+putAllcounter);
+ LogWriterUtils.getLogWriter().fine("In afterCreate"+putAllcounter);
if (event.getOperation().isPutAll()) {
assertEquals("putAllCreateCallback", event.getCallbackArgument());
}
if(putAllcounter == 25){
- LogWriterSupport.getLogWriter().fine("performingtrue");
+ LogWriterUtils.getLogWriter().fine("performingtrue");
afterCreate = true;
}
}
@@ -281,12 +281,12 @@ public class PutAllCallBkSingleVMDUnitTest extends DistributedTestCase{
static class AfterUpdateCallback extends CacheListenerAdapter {
public void afterUpdate(EntryEvent event){
afterUpdateputAllcounter++;
- LogWriterSupport.getLogWriter().fine("In afterUpdate"+afterUpdateputAllcounter);
+ LogWriterUtils.getLogWriter().fine("In afterUpdate"+afterUpdateputAllcounter);
if (event.getOperation().isPutAll()) {
assertEquals("putAllAfterUpdateCallback", event.getCallbackArgument());
}
if(afterUpdateputAllcounter == 5){
- LogWriterSupport.getLogWriter().fine("performingtrue afterUpdate");
+ LogWriterUtils.getLogWriter().fine("performingtrue afterUpdate");
afterUpdate = true;
}
}
@@ -294,12 +294,12 @@ public class PutAllCallBkSingleVMDUnitTest extends DistributedTestCase{
static class BeforeCreateCallback extends CacheWriterAdapter {
public void beforeCreate(EntryEvent event){
beforeCreateputAllcounter++;
- LogWriterSupport.getLogWriter().fine("In beforeCreate"+beforeCreateputAllcounter);
+ LogWriterUtils.getLogWriter().fine("In beforeCreate"+beforeCreateputAllcounter);
if (event.getOperation().isPutAll()) {
assertEquals("putAllCreateCallback", event.getCallbackArgument());
}
if(beforeCreateputAllcounter == 25){
- LogWriterSupport.getLogWriter().fine("performingtrue beforeCreateputAll");
+ LogWriterUtils.getLogWriter().fine("performingtrue beforeCreateputAll");
beforeCreate = true;
}
}
@@ -307,12 +307,12 @@ public class PutAllCallBkSingleVMDUnitTest extends DistributedTestCase{
static class BeforeUpdateCallback extends CacheWriterAdapter {
public void beforeUpdate(EntryEvent event){
beforeUpdateputAllcounter++;
- LogWriterSupport.getLogWriter().fine("In beforeUpdate"+beforeUpdateputAllcounter);
+ LogWriterUtils.getLogWriter().fine("In beforeUpdate"+beforeUpdateputAllcounter);
if (event.getOperation().isPutAll()) {
assertEquals("putAllAfterUpdateCallback", event.getCallbackArgument());
}
if(beforeUpdateputAllcounter == 5){
- LogWriterSupport.getLogWriter().fine("performingtrue beforeUpdate");
+ LogWriterUtils.getLogWriter().fine("performingtrue beforeUpdate");
beforeUpdate = true;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RRSynchronizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RRSynchronizationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RRSynchronizationDUnitTest.java
index f0149f1..916634b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RRSynchronizationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RRSynchronizationDUnitTest.java
@@ -44,10 +44,10 @@ import com.gemstone.gemfire.internal.cache.VMCachedDeserializable;
import com.gemstone.gemfire.internal.cache.versions.VMVersionTag;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
-import com.gemstone.gemfire.test.dunit.DistributedSystemSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -116,7 +116,7 @@ public class RRSynchronizationDUnitTest extends CacheTestCase {
// Now we crash the member who "modified" vm1's cache.
// The other replicates should perform a delta-GII for the lost member and
// get back in sync
- DistributedSystemSupport.crashDistributedSystem(vm0);
+ DistributedTestUtils.crashDistributedSystem(vm0);
verifySynchronized(vm2, crashedID);
} finally {
@@ -163,7 +163,7 @@ public class RRSynchronizationDUnitTest extends CacheTestCase {
tag.setEntryVersion(1);
tag.setIsRemoteForTesting();
EntryEventImpl event = EntryEventImpl.create(dr, Operation.CREATE, "Object3", true, forMember, true, false);
- LogWriterSupport.getLogWriter().info("applying this event to the cache: " + event);
+ LogWriterUtils.getLogWriter().info("applying this event to the cache: " + event);
event.setNewValue(new VMCachedDeserializable("value3", 12));
event.setVersionTag(tag);
dr.getRegionMap().basicPut(event, System.currentTimeMillis(), true, false, null, false, false);
@@ -178,12 +178,12 @@ public class RRSynchronizationDUnitTest extends CacheTestCase {
event = EntryEventImpl.create(dr, Operation.CREATE, "Object5", true, forMember, true, false);
event.setNewValue(Token.TOMBSTONE);
event.setVersionTag(tag);
- LogWriterSupport.getLogWriter().info("applying this event to the cache: " + event);
+ LogWriterUtils.getLogWriter().info("applying this event to the cache: " + event);
dr.getRegionMap().basicPut(event, System.currentTimeMillis(), true, false, null, false, false);
event.release();
dr.dumpBackingMap();
- LogWriterSupport.getLogWriter().info("version vector is now " + dr.getVersionVector().fullToString());
+ LogWriterUtils.getLogWriter().info("version vector is now " + dr.getVersionVector().fullToString());
assertTrue("should hold entry Object3 now", dr.containsKey("Object3"));
return true;
}
@@ -199,12 +199,12 @@ public class RRSynchronizationDUnitTest extends CacheTestCase {
boolean dumped = false;
public boolean done() {
if (TestRegion.getCache().getDistributionManager().isCurrentMember(crashedMember)) {
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
if (!TestRegion.containsKey("Object3")) {
waitingFor = "entry for Object3 not found";
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
RegionEntry re = dr.getRegionMap().getEntry("Object5");
@@ -214,7 +214,7 @@ public class RRSynchronizationDUnitTest extends CacheTestCase {
dr.dumpBackingMap();
}
waitingFor = "entry for Object5 not found";
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
if (!re.isTombstone()) {
@@ -223,7 +223,7 @@ public class RRSynchronizationDUnitTest extends CacheTestCase {
dr.dumpBackingMap();
}
waitingFor = "Object5 is not a tombstone but should be: " + re;
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
return true;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ReconnectDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ReconnectDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ReconnectDUnitTest.java
index 55df06f..aa1949b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ReconnectDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ReconnectDUnitTest.java
@@ -56,15 +56,15 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.cache.xmlcache.CacheXmlGenerator;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedSystemSupport;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -114,7 +114,7 @@ public class ReconnectDUnitTest extends CacheTestCase
//Cache cache = getCache();
closeCache();
getSystem().disconnect();
- LogWriterSupport.getLogWriter().fine("Cache Closed ");
+ LogWriterUtils.getLogWriter().fine("Cache Closed ");
}
@Override
@@ -127,7 +127,7 @@ public class ReconnectDUnitTest extends CacheTestCase
dsProperties.put(DistributionConfig.LOCATORS_NAME, "localHost["+this.locatorPort+"]");
dsProperties.put(DistributionConfig.MCAST_PORT_NAME, "0");
dsProperties.put(DistributionConfig.MEMBER_TIMEOUT_NAME, "1000");
- dsProperties.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ dsProperties.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
}
return dsProperties;
}
@@ -138,7 +138,7 @@ public class ReconnectDUnitTest extends CacheTestCase
Host.getHost(0).getVM(locatorVMNumber).invoke(new SerializableRunnable("stop locator") {
public void run() {
if (locator != null) {
- LogWriterSupport.getLogWriter().info("stopping locator " + locator);
+ LogWriterUtils.getLogWriter().info("stopping locator " + locator);
locator.stop();
}
}
@@ -238,7 +238,7 @@ public class ReconnectDUnitTest extends CacheTestCase
waitForReconnect(vm1);
System.out.println("done reconnecting vm0 and vm1");
} catch (Exception e) {
- Threads.dumpAllStacks();
+ ThreadUtils.dumpAllStacks();
throw e;
}
}
@@ -267,7 +267,7 @@ public class ReconnectDUnitTest extends CacheTestCase
final int locPort = locatorPort;
final int secondLocPort = AvailablePortHelper.getRandomAvailableTCPPort();
- DistributedSystemSupport.deleteLocatorStateFile(locPort, secondLocPort);
+ DistributedTestUtils.deleteLocatorStateFile(locPort, secondLocPort);
final String xmlFileLoc = (new File(".")).getAbsolutePath();
@@ -322,7 +322,7 @@ public class ReconnectDUnitTest extends CacheTestCase
}
try {
new CacheFactory(props).create();
- LogWriterSupport.getLogWriter().error("testReconnectCollidesWithApplication failed - application thread was able to create a cache");
+ LogWriterUtils.getLogWriter().error("testReconnectCollidesWithApplication failed - application thread was able to create a cache");
} catch (IllegalStateException cacheExists) {
// expected
}
@@ -350,8 +350,8 @@ public class ReconnectDUnitTest extends CacheTestCase
return "waiting for ds to begin reconnecting";
}
}, 30000, 1000, true);
- LogWriterSupport.getLogWriter().info("entering reconnect wait for " + ds);
- LogWriterSupport.getLogWriter().info("ds.isReconnecting() = " + ds.isReconnecting());
+ LogWriterUtils.getLogWriter().info("entering reconnect wait for " + ds);
+ LogWriterUtils.getLogWriter().info("ds.isReconnecting() = " + ds.isReconnecting());
boolean failure = true;
try {
ds.waitUntilReconnected(60, TimeUnit.SECONDS);
@@ -364,7 +364,7 @@ public class ReconnectDUnitTest extends CacheTestCase
failure = false;
return ds.getReconnectedSystem().getDistributedMember();
} catch (InterruptedException e) {
- LogWriterSupport.getLogWriter().warning("interrupted while waiting for reconnect");
+ LogWriterUtils.getLogWriter().warning("interrupted while waiting for reconnect");
return null;
} finally {
if (failure) {
@@ -393,7 +393,7 @@ public class ReconnectDUnitTest extends CacheTestCase
assertFalse(ds.isReconnecting());
DistributedSystem newDs = InternalDistributedSystem.getAnyInstance();
if (newDs != null) {
- LogWriterSupport.getLogWriter().warning("expected distributed system to be disconnected: " + newDs);
+ LogWriterUtils.getLogWriter().warning("expected distributed system to be disconnected: " + newDs);
return false;
}
return true;
@@ -406,8 +406,8 @@ public class ReconnectDUnitTest extends CacheTestCase
forceDisconnect(vm1);
newdm = waitForReconnect(vm1);
assertNotSame("expected a reconnect to occur in member", dm, newdm);
- DistributedSystemSupport.deleteLocatorStateFile(locPort);
- DistributedSystemSupport.deleteLocatorStateFile(secondLocPort);
+ DistributedTestUtils.deleteLocatorStateFile(locPort);
+ DistributedTestUtils.deleteLocatorStateFile(secondLocPort);
}
private DistributedMember getDMID(VM vm) {
@@ -434,7 +434,7 @@ public class ReconnectDUnitTest extends CacheTestCase
}
}, 30000, 1000, true);
long waitTime = 120;
- LogWriterSupport.getLogWriter().info("VM"+VM.getCurrentVMNum() + " waiting up to "+waitTime+" seconds for reconnect to complete");
+ LogWriterUtils.getLogWriter().info("VM"+VM.getCurrentVMNum() + " waiting up to "+waitTime+" seconds for reconnect to complete");
try {
ds.waitUntilReconnected(waitTime, TimeUnit.SECONDS);
} catch (InterruptedException e) {
@@ -460,7 +460,7 @@ public class ReconnectDUnitTest extends CacheTestCase
final int locPort = locatorPort;
final int secondLocPort = AvailablePortHelper.getRandomAvailableTCPPort();
- DistributedSystemSupport.deleteLocatorStateFile(locPort, secondLocPort);
+ DistributedTestUtils.deleteLocatorStateFile(locPort, secondLocPort);
final String xmlFileLoc = (new File(".")).getAbsolutePath();
@@ -532,11 +532,11 @@ public class ReconnectDUnitTest extends CacheTestCase
};
Wait.waitForCriterion(wc, 30000, 1000, false);
if (Locator.getLocator() == null) {
- LogWriterSupport.getLogWriter().error("expected to find a running locator but getLocator() returns null");
+ LogWriterUtils.getLogWriter().error("expected to find a running locator but getLocator() returns null");
return false;
}
if (((InternalLocator)Locator.getLocator()).isStopped()) {
- LogWriterSupport.getLogWriter().error("found a stopped locator");
+ LogWriterUtils.getLogWriter().error("found a stopped locator");
return false;
}
return true;
@@ -568,8 +568,8 @@ public class ReconnectDUnitTest extends CacheTestCase
gfshThread = null;
}
});
- DistributedSystemSupport.deleteLocatorStateFile(locPort);
- DistributedSystemSupport.deleteLocatorStateFile(secondLocPort);
+ DistributedTestUtils.deleteLocatorStateFile(locPort);
+ DistributedTestUtils.deleteLocatorStateFile(secondLocPort);
}
}
@@ -625,7 +625,7 @@ public class ReconnectDUnitTest extends CacheTestCase
locatorPort = locPort;
Properties config = getDistributedSystemProperties();
config.put(DistributionConfig.ROLES_NAME, "");
- config.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ config.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
// config.put("log-file", "roleLossController.log");
//creating the DS
getSystem(config);
@@ -654,7 +654,7 @@ public class ReconnectDUnitTest extends CacheTestCase
closeCache();
getSystem().disconnect();
- LogWriterSupport.getLogWriter().info("disconnected from the system...");
+ LogWriterUtils.getLogWriter().info("disconnected from the system...");
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
@@ -665,14 +665,14 @@ public class ReconnectDUnitTest extends CacheTestCase
"ROLERECONNECTTESTS") {
public void run2() throws CacheException, RuntimeException
{
- LogWriterSupport.getLogWriter().info("####### STARTING THE REAL TEST ##########");
+ LogWriterUtils.getLogWriter().info("####### STARTING THE REAL TEST ##########");
locatorPort = locPort;
Properties props = getDistributedSystemProperties();
props.put("cache-xml-file", xmlFileLoc+File.separator+"RoleReconnect-cache.xml");
props.put("max-wait-time-reconnect", "200");
final int timeReconnect = 3;
props.put("max-num-reconnect-tries", "3");
- props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.put("log-file", "roleLossVM0.log");
getSystem(props);
@@ -686,13 +686,13 @@ public class ReconnectDUnitTest extends CacheTestCase
throw new RuntimeException("The test should throw a CancelException ");
}
catch (CancelException ignor){ // can be caused by role loss during intialization.
- LogWriterSupport.getLogWriter().info("Got Expected CancelException ");
+ LogWriterUtils.getLogWriter().info("Got Expected CancelException ");
}
finally {
system.getLogWriter().info("<ExpectedException action=remove>"
+ "CacheClosedException" + "</ExpectedException");
}
- LogWriterSupport.getLogWriter().fine("roleLoss Sleeping SO call dumprun.sh");
+ LogWriterUtils.getLogWriter().fine("roleLoss Sleeping SO call dumprun.sh");
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
return reconnectTries >= timeReconnect;
@@ -702,7 +702,7 @@ public class ReconnectDUnitTest extends CacheTestCase
}
};
Wait.waitForCriterion(ev, 60 * 1000, 200, true);
- LogWriterSupport.getLogWriter().fine("roleLoss done Sleeping");
+ LogWriterUtils.getLogWriter().fine("roleLoss done Sleeping");
assertEquals(timeReconnect,
reconnectTries);
}
@@ -758,7 +758,7 @@ public class ReconnectDUnitTest extends CacheTestCase
locatorPort = locPort;
Properties config = getDistributedSystemProperties();
config.put(DistributionConfig.ROLES_NAME, "");
- config.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ config.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
//creating the DS
getSystem(config);
@@ -812,7 +812,7 @@ public class ReconnectDUnitTest extends CacheTestCase
+ " trying to reconnect");
final AsyncInvocation roleLossAsync = vm0.invokeAsync(roleLoss);
- LogWriterSupport.getLogWriter().info("waiting for role loss vm to start reconnect attempts");
+ LogWriterUtils.getLogWriter().info("waiting for role loss vm to start reconnect attempts");
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
@@ -853,15 +853,15 @@ public class ReconnectDUnitTest extends CacheTestCase
getLogWriter().severe("Exception : "+ee);
}
}*/
- LogWriterSupport.getLogWriter().info("waiting for vm0 to finish reconnecting");
- Threads.join(roleLossAsync, 120 * 1000, LogWriterSupport.getLogWriter());
+ LogWriterUtils.getLogWriter().info("waiting for vm0 to finish reconnecting");
+ ThreadUtils.join(roleLossAsync, 120 * 1000);
}
if (roleLossAsync.getException() != null){
Assert.fail("Exception in Vm0", roleLossAsync.getException());
}
- Threads.join(avkVm1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(avkVm1, 30 * 1000);
if (avkVm1.getException() != null){
Assert.fail("Exception in Vm1", avkVm1.getException());
}
@@ -879,7 +879,7 @@ public class ReconnectDUnitTest extends CacheTestCase
try {
// closeCache();
// getSystem().disconnect();
- LogWriterSupport.getLogWriter().info(startupMessage);
+ LogWriterUtils.getLogWriter().info(startupMessage);
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
return ((Boolean)otherVM.invoke(ReconnectDUnitTest.class, "isInitialRolePlayerStarted")).booleanValue();
@@ -890,13 +890,13 @@ public class ReconnectDUnitTest extends CacheTestCase
};
Wait.waitForCriterion(ev, 10 * 1000, 200, true);
- LogWriterSupport.getLogWriter().info("Starting the test and creating the cache and regions etc ...");
+ LogWriterUtils.getLogWriter().info("Starting the test and creating the cache and regions etc ...");
locatorPort = locPort;
Properties props = getDistributedSystemProperties();
props.put("cache-xml-file", "RoleRegained.xml");
props.put("max-wait-time-reconnect", "3000");
props.put("max-num-reconnect-tries", "8");
- props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
getSystem(props);
system.getLogWriter().info("<ExpectedException action=add>"
@@ -906,7 +906,7 @@ public class ReconnectDUnitTest extends CacheTestCase
getCache();
} catch (CancelException e) {
// can happen if RoleA goes away during initialization
- LogWriterSupport.getLogWriter().info("cache threw CancelException while creating the cache");
+ LogWriterUtils.getLogWriter().info("cache threw CancelException while creating the cache");
}
initialized = true;
@@ -915,7 +915,7 @@ public class ReconnectDUnitTest extends CacheTestCase
ev = new WaitCriterion() {
public boolean done() {
- LogWriterSupport.getLogWriter().info("ReconnectTries=" + reconnectTries);
+ LogWriterUtils.getLogWriter().info("ReconnectTries=" + reconnectTries);
return reconnectTries != 0;
}
public String description() {
@@ -972,8 +972,8 @@ public class ReconnectDUnitTest extends CacheTestCase
excuse = "value is wrong";
return false;
}
- LogWriterSupport.getLogWriter().info("All assertions passed");
- LogWriterSupport.getLogWriter().info("MyKey : "+key+" and myvalue : "+value);
+ LogWriterUtils.getLogWriter().info("All assertions passed");
+ LogWriterUtils.getLogWriter().info("MyKey : "+key+" and myvalue : "+value);
return true;
}
catch (CancelException ecc){
@@ -984,7 +984,7 @@ public class ReconnectDUnitTest extends CacheTestCase
}
finally {
- LogWriterSupport.getLogWriter().info("waiting for reconnect. Current status is '"+excuse+"'");
+ LogWriterUtils.getLogWriter().info("waiting for reconnect. Current status is '"+excuse+"'");
}
return false;
}
@@ -1005,11 +1005,11 @@ public class ReconnectDUnitTest extends CacheTestCase
throw e;
}
catch (Error th) {
- LogWriterSupport.getLogWriter().severe("DEBUG", th);
+ LogWriterUtils.getLogWriter().severe("DEBUG", th);
throw th;
} finally {
if (t != null) {
- Threads.join(t, 2 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(t, 2 * 60 * 1000);
}
// greplogs won't care if you remove an exception that was never added,
// and this ensures that it gets removed.
@@ -1029,12 +1029,12 @@ public class ReconnectDUnitTest extends CacheTestCase
"second RoleA player") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info(startupMessage);
+ LogWriterUtils.getLogWriter().info(startupMessage);
//closeCache();
// getSystem().disconnect();
locatorPort = locPort;
Properties props = getDistributedSystemProperties();
- props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.put(DistributionConfig.ROLES_NAME, "RoleA");
getSystem(props);
@@ -1045,7 +1045,7 @@ public class ReconnectDUnitTest extends CacheTestCase
RegionAttributes attr = fac.create();
Region region = createRootRegion(regionName, attr);
- LogWriterSupport.getLogWriter().info("STARTED THE REQUIREDROLES CACHE");
+ LogWriterUtils.getLogWriter().info("STARTED THE REQUIREDROLES CACHE");
try{
Thread.sleep(120);
}
@@ -1062,7 +1062,7 @@ public class ReconnectDUnitTest extends CacheTestCase
catch(InterruptedException ee){
fail("interrupted");
}
- LogWriterSupport.getLogWriter().info("RolePlayer is done...");
+ LogWriterUtils.getLogWriter().info("RolePlayer is done...");
}
@@ -1081,10 +1081,10 @@ public class ReconnectDUnitTest extends CacheTestCase
{
// closeCache();
// getSystem().disconnect();
- LogWriterSupport.getLogWriter().info(startupMessage);
+ LogWriterUtils.getLogWriter().info(startupMessage);
locatorPort = locPort;
Properties props = getDistributedSystemProperties();
- props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.put(DistributionConfig.ROLES_NAME, "RoleA");
getSystem(props);
@@ -1095,7 +1095,7 @@ public class ReconnectDUnitTest extends CacheTestCase
RegionAttributes attr = fac.create();
createRootRegion(regionName, attr);
- LogWriterSupport.getLogWriter().info("STARTED THE REQUIREDROLES CACHE");
+ LogWriterUtils.getLogWriter().info("STARTED THE REQUIREDROLES CACHE");
initialRolePlayerStarted = true;
while(!((Boolean)otherVM.invoke(ReconnectDUnitTest.class, "isInitialized")).booleanValue()){
@@ -1105,7 +1105,7 @@ public class ReconnectDUnitTest extends CacheTestCase
fail("interrupted");
}
}
- LogWriterSupport.getLogWriter().info("RoleAPlayerInitializer is done...");
+ LogWriterUtils.getLogWriter().info("RoleAPlayerInitializer is done...");
closeCache();
}
@@ -1115,10 +1115,10 @@ public class ReconnectDUnitTest extends CacheTestCase
void addReconnectListener() {
reconnectTries = 0; // reset the count for this listener
- LogWriterSupport.getLogWriter().info("adding reconnect listener");
+ LogWriterUtils.getLogWriter().info("adding reconnect listener");
ReconnectListener reconlis = new ReconnectListener() {
public void reconnecting(InternalDistributedSystem oldSys) {
- LogWriterSupport.getLogWriter().info("reconnect listener invoked");
+ LogWriterUtils.getLogWriter().info("reconnect listener invoked");
reconnectTries++;
}
public void onReconnect(InternalDistributedSystem system1, InternalDistributedSystem system2) {}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionExpirationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionExpirationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionExpirationDUnitTest.java
index a6e228c..4187aaf 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionExpirationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionExpirationDUnitTest.java
@@ -24,7 +24,7 @@ import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -164,9 +164,9 @@ public class RegionExpirationDUnitTest extends CacheTestCase {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter().info("vm0 is " + vm0.getPid() + ", vm1 is " + vm1);
+ LogWriterUtils.getLogWriter().info("vm0 is " + vm0.getPid() + ", vm1 is " + vm1);
- LogWriterSupport.getLogWriter().info("2: " + regionName + " action is " + action);
+ LogWriterUtils.getLogWriter().info("2: " + regionName + " action is " + action);
final long tilt = System.currentTimeMillis() + timeoutSecs * 1000;
@@ -218,7 +218,7 @@ public class RegionExpirationDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Get") {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("3: " + regionName + ", " + region + ", action is " + action);
+ LogWriterUtils.getLogWriter().info("3: " + regionName + ", " + region + ", action is " + action);
if (action.isInvalidate() || action.isLocalInvalidate()) {
assertTrue(!region.containsValueForKey(key));
} else {
@@ -255,7 +255,7 @@ public class RegionExpirationDUnitTest extends CacheTestCase {
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setEarlyAck(false);
RegionAttributes attrs = factory.create();
- LogWriterSupport.getLogWriter().info("4: " + regionName + " ttl action is " + ttl);
+ LogWriterUtils.getLogWriter().info("4: " + regionName + " ttl action is " + ttl);
getOrCreateRootRegion().createSubregion(regionName, attrs);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionMembershipListenerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionMembershipListenerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionMembershipListenerDUnitTest.java
index 055bf36..d539b82 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionMembershipListenerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionMembershipListenerDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.internal.cache.CacheDistributionAdvisor.CacheProfile
import com.gemstone.gemfire.internal.cache.DistributedRegion;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -367,7 +367,7 @@ public class RegionMembershipListenerDUnitTest extends CacheTestCase {
return MyRML.this.toString() + " waiting for Op " + op + " when lastOp was " + getOpName(MyRML.this.lastOp);
}
};
- LogWriterSupport.getLogWriter().info(this.toString() + " waiting for Op " + getOpName(op)
+ LogWriterUtils.getLogWriter().info(this.toString() + " waiting for Op " + getOpName(op)
+ " when lastOp was " + getOpName(this.lastOp));
Wait.waitForCriterion(ev, this.timeOut, 200, true);
assertEquals(op, this.lastOp);
@@ -388,7 +388,7 @@ public class RegionMembershipListenerDUnitTest extends CacheTestCase {
this.lastOp = Op.Initial;
this.lastEvent = null;
this.initialMembers = initialMembers;
- LogWriterSupport.getLogWriter().info(this.toString() + " received initialMembers notification for region " + r
+ LogWriterUtils.getLogWriter().info(this.toString() + " received initialMembers notification for region " + r
+ " with members " + Arrays.deepToString(initialMembers));
}
public void afterRemoteRegionCreate(RegionEvent event) {
@@ -398,23 +398,23 @@ public class RegionMembershipListenerDUnitTest extends CacheTestCase {
if (cacheProfile != null) {
this.memberInitialized = cacheProfile.regionInitialized;
if (!this.memberInitialized) {
- LogWriterSupport.getLogWriter().warning("afterRemoteRegionCreate invoked when member is not done initializing!", new Exception("stack trace"));
+ LogWriterUtils.getLogWriter().warning("afterRemoteRegionCreate invoked when member is not done initializing!", new Exception("stack trace"));
}
- LogWriterSupport.getLogWriter().info(this.toString() + " received afterRemoteRegionCreate notification for event " + event);
+ LogWriterUtils.getLogWriter().info(this.toString() + " received afterRemoteRegionCreate notification for event " + event);
} else {
- LogWriterSupport.getLogWriter().warning("afterRemoteRegionCreate was expecting a profile in the event callback but there was none. " +
+ LogWriterUtils.getLogWriter().warning("afterRemoteRegionCreate was expecting a profile in the event callback but there was none. " +
" This indicates a problem with the test hook DistributedRegion.TEST_HOOK_ADD_PROFILE");
}
}
public void afterRemoteRegionDeparture(RegionEvent event) {
this.lastOp = Op.Departure;
this.lastEvent = event;
- LogWriterSupport.getLogWriter().info(this.toString() + " received afterRemoteRegionDeparture notification for event " + event);
+ LogWriterUtils.getLogWriter().info(this.toString() + " received afterRemoteRegionDeparture notification for event " + event);
}
public void afterRemoteRegionCrash(RegionEvent event) {
this.lastOp = Op.Crash;
this.lastEvent = event;
- LogWriterSupport.getLogWriter().info(this.toString() + " received afterRemoteRegionCrash notification for event " + event);
+ LogWriterUtils.getLogWriter().info(this.toString() + " received afterRemoteRegionCrash notification for event " + event);
}
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionReliabilityTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionReliabilityTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionReliabilityTestCase.java
index 5399b4b..2ae8293 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionReliabilityTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionReliabilityTestCase.java
@@ -66,9 +66,8 @@ import com.gemstone.gemfire.internal.cache.TXState;
import com.gemstone.gemfire.internal.cache.TXStateInterface;
import com.gemstone.gemfire.internal.cache.TXStateProxyImpl;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -1414,7 +1413,7 @@ public abstract class RegionReliabilityTestCase extends ReliabilityTestCase {
}
});
- Threads.join(thread, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(thread, 30 * 1000);
assertTrue(region.isDestroyed());
try {
region.put("fee", "fi");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionTestCase.java
index 326bc73..ed89a13 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RegionTestCase.java
@@ -260,7 +260,7 @@ public abstract class RegionTestCase extends CacheTestCase {
assertEquals(value, values.iterator().next());
}
catch (UnsupportedOperationException uoe) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Region.values() reported UnsupportedOperation");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Region.values() reported UnsupportedOperation");
}
}
@@ -464,7 +464,7 @@ public abstract class RegionTestCase extends CacheTestCase {
region.destroy(key);
Region.Entry entry2 = region.getEntry(key);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Found entry for destroyed key: " + entry2);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Found entry for destroyed key: " + entry2);
assertNull(entry2);
if (entry.isLocal()) {
assertTrue(entry.isDestroyed());
@@ -1975,7 +1975,7 @@ public abstract class RegionTestCase extends CacheTestCase {
continue;
}
if (now >= tilt - SLOP) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().warning("Entry invalidated sloppily "
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().warning("Entry invalidated sloppily "
+ "now=" + now + " tilt=" + tilt + " delta = " + (tilt - now));
break;
}
@@ -2046,7 +2046,7 @@ public abstract class RegionTestCase extends CacheTestCase {
continue;
}
if (now >= tilt - SLOP) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().warning("Entry destroyed sloppily "
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().warning("Entry destroyed sloppily "
+ "now=" + now + " tilt=" + tilt + " delta = " + (tilt - now));
break;
}
@@ -2090,7 +2090,7 @@ public abstract class RegionTestCase extends CacheTestCase {
continue;
}
if (now >= tilt - SLOP) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().warning("Region destroyed sloppily "
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().warning("Region destroyed sloppily "
+ "now=" + now + " tilt=" + tilt + " delta = " + (tilt - now));
break;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RequiredRolesDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RequiredRolesDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RequiredRolesDUnitTest.java
index 3e5ed6a..1e69266 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RequiredRolesDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RequiredRolesDUnitTest.java
@@ -35,9 +35,9 @@ import com.gemstone.gemfire.distributed.Role;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.distributed.internal.membership.InternalRole;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -223,7 +223,7 @@ public class RequiredRolesDUnitTest extends ReliabilityTestCase {
// create region in vm3... gain for 2 roles
Host.getHost(0).getVM(vm3).invoke(create);
- Threads.join(threadA, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(threadA, 30 * 1000);
assertTrue(this.finishTestWaitForRequiredRoles);
assertTrue(this.rolesTestWaitForRequiredRoles.isEmpty());
@@ -243,7 +243,7 @@ public class RequiredRolesDUnitTest extends ReliabilityTestCase {
this.finishTestWaitForRequiredRoles = false;
threadA = new Thread(group, runWaitForRequiredRoles);
threadA.start();
- Threads.join(threadA, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(threadA, 30 * 1000);
assertTrue(this.startTestWaitForRequiredRoles);
assertTrue(this.finishTestWaitForRequiredRoles);
assertTrue(this.rolesTestWaitForRequiredRoles.isEmpty());
@@ -256,7 +256,7 @@ public class RequiredRolesDUnitTest extends ReliabilityTestCase {
this.finishTestWaitForRequiredRoles = false;
threadA = new Thread(group, runWaitForRequiredRoles);
threadA.start();
- Threads.join(threadA, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(threadA, 30 * 1000);
assertTrue(this.startTestWaitForRequiredRoles);
assertTrue(this.finishTestWaitForRequiredRoles);
assertTrue(this.rolesTestWaitForRequiredRoles.isEmpty());
@@ -286,7 +286,7 @@ public class RequiredRolesDUnitTest extends ReliabilityTestCase {
// end the wait and make sure no roles are missing
Host.getHost(0).getVM(vm2).invoke(create);
- Threads.join(threadA, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(threadA, 30 * 1000);
assertTrue(this.startTestWaitForRequiredRoles);
assertTrue(this.finishTestWaitForRequiredRoles);
assertTrue(this.rolesTestWaitForRequiredRoles.isEmpty());
@@ -432,7 +432,7 @@ public class RequiredRolesDUnitTest extends ReliabilityTestCase {
SystemFailure.setFailure((VirtualMachineError)e); // don't throw
}
String s = "Uncaught exception in thread " + t;
- LogWriterSupport.getLogWriter().error(s, e);
+ LogWriterUtils.getLogWriter().error(s, e);
fail(s);
}
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RolePerformanceDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RolePerformanceDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RolePerformanceDUnitTest.java
index e0249e0..e7c0ba7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RolePerformanceDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/RolePerformanceDUnitTest.java
@@ -26,7 +26,7 @@ import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
/**
@@ -64,7 +64,7 @@ public class RolePerformanceDUnitTest extends CacheTestCase {
throw e;
}
else {
- LogWriterSupport.getLogWriter().info("testRolePerformance attempt #" + i +
+ LogWriterUtils.getLogWriter().info("testRolePerformance attempt #" + i +
" failed -- reattempting up to 10x", e);
}
}
@@ -102,7 +102,7 @@ public class RolePerformanceDUnitTest extends CacheTestCase {
String data = name + " results: millisNoRoles=" + millisNoRoles +
", millisWithRoles=" + millisWithRoles + ", deviation=" + deviation +
", ceiling=" + ceiling;
- LogWriterSupport.getLogWriter().info(data);
+ LogWriterUtils.getLogWriter().info(data);
assertTrue("millisWithRoles is greater than allowable deviation: " + data,
millisWithRoles <= ceiling);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SearchAndLoadDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SearchAndLoadDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SearchAndLoadDUnitTest.java
index 2523cad..01aa205 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SearchAndLoadDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SearchAndLoadDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -271,9 +271,9 @@ public class SearchAndLoadDUnitTest extends CacheTestCase {
public void run2() {
Region region = getCache().getRegion("root/"+name);
- LogWriterSupport.getLogWriter().info("t1 is invoking get("+objectName+")");
+ LogWriterUtils.getLogWriter().info("t1 is invoking get("+objectName+")");
try {
- LogWriterSupport.getLogWriter().info("t1 retrieved value " + region.get(objectName));
+ LogWriterUtils.getLogWriter().info("t1 retrieved value " + region.get(objectName));
fail("first load should have triggered an exception");
} catch (RuntimeException e) {
if (!e.getMessage().contains(exceptionString)) {
@@ -288,7 +288,7 @@ public class SearchAndLoadDUnitTest extends CacheTestCase {
final Object[] valueHolder = new Object[1];
// wait for vm1 to cause the loader to be invoked
- LogWriterSupport.getLogWriter().info("t2 is waiting for loader to be invoked by t1");
+ LogWriterUtils.getLogWriter().info("t2 is waiting for loader to be invoked by t1");
try {
loaderInvokedLatch.await(30, TimeUnit.SECONDS);
} catch (InterruptedException e) {
@@ -327,7 +327,7 @@ public class SearchAndLoadDUnitTest extends CacheTestCase {
fail("get() operation blocked for too long - test needs some work");
}
- LogWriterSupport.getLogWriter().info("t2 is invoking get("+objectName+")");
+ LogWriterUtils.getLogWriter().info("t2 is invoking get("+objectName+")");
Object value = valueHolder[0];
if (value instanceof RuntimeException) {
if ( ((Exception)value).getMessage().contains(exceptionString) ) {
@@ -336,7 +336,7 @@ public class SearchAndLoadDUnitTest extends CacheTestCase {
throw (RuntimeException)value;
}
} else {
- LogWriterSupport.getLogWriter().info("t2 retrieved value " + value);
+ LogWriterUtils.getLogWriter().info("t2 retrieved value " + value);
assertNotNull(value);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SlowRecDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SlowRecDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SlowRecDUnitTest.java
index 9ab9867..899fbb1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SlowRecDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/SlowRecDUnitTest.java
@@ -40,9 +40,9 @@ import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.DMStats;
import com.gemstone.gemfire.internal.tcp.Connection;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -295,7 +295,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
dequeuedMsgs = stats.getAsyncDequeuedMsgs();
curQueuedMsgs = queuedMsgs - dequeuedMsgs;
}
- LogWriterSupport.getLogWriter().info("After " + count + " " + " puts slowrec mode kicked in by queuing " + queuedMsgs + " for a total size of " + queueSize);
+ LogWriterUtils.getLogWriter().info("After " + count + " " + " puts slowrec mode kicked in by queuing " + queuedMsgs + " for a total size of " + queueSize);
} finally {
forceQueueFlush();
}
@@ -310,7 +310,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
final long start = System.currentTimeMillis();
Wait.waitForCriterion(ev, 30 * 1000, 200, true);
final long finish = System.currentTimeMillis();
- LogWriterSupport.getLogWriter().info("After " + (finish - start) + " ms async msgs where flushed. A total of " + stats.getAsyncDequeuedMsgs() + " were flushed. lastValue=" + lastValue);
+ LogWriterUtils.getLogWriter().info("After " + (finish - start) + " ms async msgs where flushed. A total of " + stats.getAsyncDequeuedMsgs() + " were flushed. lastValue=" + lastValue);
checkLastValueInOtherVm(lastValue, null);
}
@@ -393,7 +393,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
// + " dequeuedMsgs=" + dequeuedMsgs
// + " conflatedMsgs=" + conflatedMsgs);
final long finish = System.currentTimeMillis();
- LogWriterSupport.getLogWriter().info("After " + (finish - start) + " ms async msgs where flushed. A total of " + (stats.getAsyncDequeuedMsgs()-intialDeQueuedMsgs) + " were flushed. Leaving a queue size of " + stats.getAsyncQueueSize() + ". The lastValue was " + lastValue);
+ LogWriterUtils.getLogWriter().info("After " + (finish - start) + " ms async msgs where flushed. A total of " + (stats.getAsyncDequeuedMsgs()-intialDeQueuedMsgs) + " were flushed. Leaving a queue size of " + stats.getAsyncQueueSize() + ". The lastValue was " + lastValue);
checkLastValueInOtherVm(lastValue, null);
}
@@ -437,8 +437,8 @@ public class SlowRecDUnitTest extends CacheTestCase {
// give threads a chance to get queued
try {Thread.sleep(100);} catch (InterruptedException ignore) {fail("interrupted");}
forceQueueFlush();
- Threads.join(t, 2 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(t2, 2 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(t, 2 * 1000);
+ ThreadUtils.join(t2, 2 * 1000);
long endQueuedMsgs = stats.getAsyncQueuedMsgs();
long endConflatedMsgs = stats.getAsyncConflatedMsgs();
assertEquals(startConflatedMsgs, endConflatedMsgs);
@@ -481,7 +481,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
// now make sure update+destroy does not conflate
final Object key = "key";
- LogWriterSupport.getLogWriter().info("[testConflationSequence] about to force queuing");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] about to force queuing");
forceQueuing(r);
int count = 0;
@@ -493,7 +493,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
// long dequeuedMsgs = stats.getAsyncDequeuedMsgs();
int endCount = count+60;
- LogWriterSupport.getLogWriter().info("[testConflationSequence] about to build up queue");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] about to build up queue");
long begin = System.currentTimeMillis();
while (count < endCount) {
value = "count=" + count;
@@ -516,14 +516,14 @@ public class SlowRecDUnitTest extends CacheTestCase {
checkLastValueInOtherVm(lastValue, mylcb);
// now make sure create+update+localDestroy does not conflate
- LogWriterSupport.getLogWriter().info("[testConflationSequence] force queuing create-update-destroy");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] force queuing create-update-destroy");
forceQueuing(r);
initialConflatedMsgs = stats.getAsyncConflatedMsgs();
// initialDequeuedMsgs = stats.getAsyncDequeuedMsgs();
// dequeuedMsgs = stats.getAsyncDequeuedMsgs();
endCount = count + 40;
- LogWriterSupport.getLogWriter().info("[testConflationSequence] create-update-destroy");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] create-update-destroy");
begin = System.currentTimeMillis();
while (count < endCount) {
value = "count=" + count;
@@ -543,7 +543,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
checkLastValueInOtherVm(lastValue, null);
// now make sure update+invalidate does not conflate
- LogWriterSupport.getLogWriter().info("[testConflationSequence] force queuing update-invalidate");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] force queuing update-invalidate");
forceQueuing(r);
initialConflatedMsgs = stats.getAsyncConflatedMsgs();
// initialDequeuedMsgs = stats.getAsyncDequeuedMsgs();
@@ -554,7 +554,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
// dequeuedMsgs = stats.getAsyncDequeuedMsgs();
endCount = count + 40;
- LogWriterSupport.getLogWriter().info("[testConflationSequence] update-invalidate");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] update-invalidate");
begin = System.currentTimeMillis();
while (count < endCount) {
value = "count=" + count;
@@ -569,14 +569,14 @@ public class SlowRecDUnitTest extends CacheTestCase {
}
assertEquals(initialConflatedMsgs, stats.getAsyncConflatedMsgs());
forceQueueFlush();
- LogWriterSupport.getLogWriter().info("[testConflationSequence] assert other vm");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] assert other vm");
checkLastValueInOtherVm(lastValue, null);
r.destroy(key);
// now make sure updates to a conflating region are conflated even while
// updates to a non-conflating are not.
- LogWriterSupport.getLogWriter().info("[testConflationSequence] conflate & no-conflate regions");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] conflate & no-conflate regions");
forceQueuing(r);
final int initialAsyncSocketWrites = stats.getAsyncSocketWrites();
// initialDequeuedMsgs = stats.getAsyncDequeuedMsgs();
@@ -606,7 +606,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
endCount = count + 80;
begin = System.currentTimeMillis();
- LogWriterSupport.getLogWriter().info("[testConflationSequence:DEBUG] count=" + count
+ LogWriterUtils.getLogWriter().info("[testConflationSequence:DEBUG] count=" + count
+ " queuedMsgs=" + stats.getAsyncQueuedMsgs()
+ " conflatedMsgs=" + stats.getAsyncConflatedMsgs()
+ " dequeuedMsgs=" + stats.getAsyncDequeuedMsgs()
@@ -637,7 +637,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
}
forceQueueFlush();
- LogWriterSupport.getLogWriter().info("[testConflationSequence] assert other vm");
+ LogWriterUtils.getLogWriter().info("[testConflationSequence] assert other vm");
checkLastValueInOtherVm(lastValue, null);
}
/**
@@ -688,7 +688,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
fail("should have exceeded max-queue-size by now");
}
}
- LogWriterSupport.getLogWriter().info("After " + count + " " + VALUE_SIZE + " byte puts slowrec mode kicked in but the queue filled when its size reached " + queueSize + " with " + queuedMsgs + " msgs");
+ LogWriterUtils.getLogWriter().info("After " + count + " " + VALUE_SIZE + " byte puts slowrec mode kicked in but the queue filled when its size reached " + queueSize + " with " + queuedMsgs + " msgs");
// make sure we lost a connection to vm0
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
@@ -760,7 +760,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
fail("should have exceeded async-queue-timeout by now");
}
}
- LogWriterSupport.getLogWriter().info("After " + count + " " + VALUE_SIZE + " byte puts slowrec mode kicked in but the queue filled when its size reached " + queueSize + " with " + queuedMsgs + " msgs");
+ LogWriterUtils.getLogWriter().info("After " + count + " " + VALUE_SIZE + " byte puts slowrec mode kicked in but the queue filled when its size reached " + queueSize + " with " + queuedMsgs + " msgs");
// make sure we lost a connection to vm0
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
@@ -818,7 +818,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
public final Object CONTROL_LOCK = new Object();
public void afterCreate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info(event.getRegion().getName() + " afterCreate " + event.getKey());
+ LogWriterUtils.getLogWriter().info(event.getRegion().getName() + " afterCreate " + event.getKey());
synchronized(this.CONTROL_LOCK) {
if (event.getCallbackArgument() != null) {
this.callbackArguments.add(
@@ -830,7 +830,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
processEvent(event);
}
public void afterUpdate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info(event.getRegion().getName() + " afterUpdate " + event.getKey());
+ LogWriterUtils.getLogWriter().info(event.getRegion().getName() + " afterUpdate " + event.getKey());
synchronized(this.CONTROL_LOCK) {
if (event.getCallbackArgument() != null) {
this.callbackArguments.add(
@@ -884,14 +884,14 @@ public class SlowRecDUnitTest extends CacheTestCase {
}
private void processSleep(EntryEvent event) {
int sleepMs = ((Integer)event.getNewValue()).intValue();
- LogWriterSupport.getLogWriter().info("[processSleep] sleeping for " + sleepMs);
+ LogWriterUtils.getLogWriter().info("[processSleep] sleeping for " + sleepMs);
try {
Thread.sleep(sleepMs);
} catch (InterruptedException ignore) {fail("interrupted");}
}
private void processWait(EntryEvent event) {
int sleepMs = ((Integer)event.getNewValue()).intValue();
- LogWriterSupport.getLogWriter().info("[processWait] waiting for " + sleepMs);
+ LogWriterUtils.getLogWriter().info("[processWait] waiting for " + sleepMs);
synchronized(this.CONTROL_LOCK) {
try {
this.CONTROL_LOCK.wait(sleepMs);
@@ -899,7 +899,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
}
}
private void processDisconnect(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("[processDisconnect] disconnecting");
+ LogWriterUtils.getLogWriter().info("[processDisconnect] disconnecting");
disconnectFromDS();
}
};
@@ -920,7 +920,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().error("Encountered exception: ", t);
+ LogWriterUtils.getLogWriter().error("Encountered exception: ", t);
throw t;
}
finally {
@@ -989,11 +989,11 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// put vm0 cache listener into wait
- LogWriterSupport.getLogWriter().info("[doTestMultipleRegionConflation] about to put vm0 into wait");
+ LogWriterUtils.getLogWriter().info("[doTestMultipleRegionConflation] about to put vm0 into wait");
r1.put(KEY_WAIT, new Integer(millisToWait));
// build up queue size
- LogWriterSupport.getLogWriter().info("[doTestMultipleRegionConflation] building up queue size...");
+ LogWriterUtils.getLogWriter().info("[doTestMultipleRegionConflation] building up queue size...");
final Object key = "key";
final int socketBufferSize = getSystem().getConfig().getSocketBufferSize();
final int VALUE_SIZE = socketBufferSize*3;
@@ -1006,7 +1006,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
r1.put(key, value);
}
- LogWriterSupport.getLogWriter().info("[doTestMultipleRegionConflation] After " +
+ LogWriterUtils.getLogWriter().info("[doTestMultipleRegionConflation] After " +
count + " puts of size " + VALUE_SIZE +
" slowrec mode kicked in with queue size=" + stats.getAsyncQueueSize());
@@ -1064,7 +1064,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
CALLBACK_DESTROY, CALLBACK_CREATE, CALLBACK_UPDATE };
// send notify to vm0
- LogWriterSupport.getLogWriter().info("[doTestMultipleRegionConflation] wake up vm0");
+ LogWriterUtils.getLogWriter().info("[doTestMultipleRegionConflation] wake up vm0");
getOtherVm().invoke(new SerializableRunnable("Wake up other vm") {
public void run() {
synchronized(doTestMultipleRegionConflation_R1_Listener.CONTROL_LOCK) {
@@ -1074,7 +1074,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// wait for queue to be flushed
- LogWriterSupport.getLogWriter().info("[doTestMultipleRegionConflation] wait for vm0");
+ LogWriterUtils.getLogWriter().info("[doTestMultipleRegionConflation] wait for vm0");
getOtherVm().invoke(new SerializableRunnable("Wait for other vm") {
public void run() {
try {
@@ -1093,12 +1093,12 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// assert values on both listeners
- LogWriterSupport.getLogWriter().info("[doTestMultipleRegionConflation] assert callback arguments");
+ LogWriterUtils.getLogWriter().info("[doTestMultipleRegionConflation] assert callback arguments");
getOtherVm().invoke(new SerializableRunnable("Assert callback arguments") {
public void run() {
synchronized(doTestMultipleRegionConflation_R1_Listener.CONTROL_LOCK) {
- LogWriterSupport.getLogWriter().info("doTestMultipleRegionConflation_R1_Listener.callbackArguments=" + doTestMultipleRegionConflation_R1_Listener.callbackArguments);
- LogWriterSupport.getLogWriter().info("doTestMultipleRegionConflation_R1_Listener.callbackTypes=" + doTestMultipleRegionConflation_R1_Listener.callbackTypes);
+ LogWriterUtils.getLogWriter().info("doTestMultipleRegionConflation_R1_Listener.callbackArguments=" + doTestMultipleRegionConflation_R1_Listener.callbackArguments);
+ LogWriterUtils.getLogWriter().info("doTestMultipleRegionConflation_R1_Listener.callbackTypes=" + doTestMultipleRegionConflation_R1_Listener.callbackTypes);
assertEquals(doTestMultipleRegionConflation_R1_Listener.callbackArguments.size(),
doTestMultipleRegionConflation_R1_Listener.callbackTypes.size());
int i = 0;
@@ -1112,8 +1112,8 @@ public class SlowRecDUnitTest extends CacheTestCase {
}
}
synchronized(doTestMultipleRegionConflation_R2_Listener.CONTROL_LOCK) {
- LogWriterSupport.getLogWriter().info("doTestMultipleRegionConflation_R2_Listener.callbackArguments=" + doTestMultipleRegionConflation_R2_Listener.callbackArguments);
- LogWriterSupport.getLogWriter().info("doTestMultipleRegionConflation_R2_Listener.callbackTypes=" + doTestMultipleRegionConflation_R2_Listener.callbackTypes);
+ LogWriterUtils.getLogWriter().info("doTestMultipleRegionConflation_R2_Listener.callbackArguments=" + doTestMultipleRegionConflation_R2_Listener.callbackArguments);
+ LogWriterUtils.getLogWriter().info("doTestMultipleRegionConflation_R2_Listener.callbackTypes=" + doTestMultipleRegionConflation_R2_Listener.callbackTypes);
assertEquals(doTestMultipleRegionConflation_R2_Listener.callbackArguments.size(),
doTestMultipleRegionConflation_R2_Listener.callbackTypes.size());
int i = 0;
@@ -1142,7 +1142,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().error("Encountered exception: ", t);
+ LogWriterUtils.getLogWriter().error("Encountered exception: ", t);
throw t;
}
finally {
@@ -1188,13 +1188,13 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// put vm0 cache listener into wait
- LogWriterSupport.getLogWriter().info("[testDisconnectCleanup] about to put vm0 into wait");
+ LogWriterUtils.getLogWriter().info("[testDisconnectCleanup] about to put vm0 into wait");
int millisToWait = 1000 * 60 * 5; // 5 minutes
r.put(KEY_WAIT, new Integer(millisToWait));
r.put(KEY_DISCONNECT, KEY_DISCONNECT);
// build up queue size
- LogWriterSupport.getLogWriter().info("[testDisconnectCleanup] building up queue size...");
+ LogWriterUtils.getLogWriter().info("[testDisconnectCleanup] building up queue size...");
final Object key = "key";
final int socketBufferSize = getSystem().getConfig().getSocketBufferSize();
final int VALUE_SIZE = socketBufferSize*3;
@@ -1209,7 +1209,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
assertFalse(System.currentTimeMillis() >= abortMillis);
}
- LogWriterSupport.getLogWriter().info("[testDisconnectCleanup] After " +
+ LogWriterUtils.getLogWriter().info("[testDisconnectCleanup] After " +
count + " puts of size " + VALUE_SIZE +
" slowrec mode kicked in with queue size=" + stats.getAsyncQueueSize());
@@ -1226,7 +1226,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
assertFalse(System.currentTimeMillis() >= abortMillis);
}
- LogWriterSupport.getLogWriter().info("[testDisconnectCleanup] After " +
+ LogWriterUtils.getLogWriter().info("[testDisconnectCleanup] After " +
count + " puts of size " + VALUE_SIZE + " queue size has reached " +
stats.getAsyncQueueSize() + " bytes and number of queues is " +
stats.getAsyncQueues() + ".");
@@ -1238,7 +1238,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
assertTrue(dm.getOtherDistributionManagerIds().size() > others.size());
// send notify to vm0
- LogWriterSupport.getLogWriter().info("[testDisconnectCleanup] wake up vm0");
+ LogWriterUtils.getLogWriter().info("[testDisconnectCleanup] wake up vm0");
getOtherVm().invoke(new SerializableRunnable("Wake up other vm") {
public void run() {
synchronized(doTestDisconnectCleanup_Listener.CONTROL_LOCK) {
@@ -1248,7 +1248,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// make sure we lost a connection to vm0
- LogWriterSupport.getLogWriter().info("[testDisconnectCleanup] wait for vm0 to disconnect");
+ LogWriterUtils.getLogWriter().info("[testDisconnectCleanup] wait for vm0 to disconnect");
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
return dm.getOtherDistributionManagerIds().size() <= others.size();
@@ -1261,7 +1261,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
assertEquals(others, dm.getOtherDistributionManagerIds());
// check free memory... perform wait loop with System.gc
- LogWriterSupport.getLogWriter().info("[testDisconnectCleanup] wait for queue cleanup");
+ LogWriterUtils.getLogWriter().info("[testDisconnectCleanup] wait for queue cleanup");
ev = new WaitCriterion() {
public boolean done() {
if (stats.getAsyncQueues() <= initialQueues) {
@@ -1296,7 +1296,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().error("Encountered exception: ", t);
+ LogWriterUtils.getLogWriter().error("Encountered exception: ", t);
throw t;
}
finally {
@@ -1344,12 +1344,12 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// put vm0 cache listener into wait
- LogWriterSupport.getLogWriter().info("[testPartialMessage] about to put vm0 into wait");
+ LogWriterUtils.getLogWriter().info("[testPartialMessage] about to put vm0 into wait");
final int millisToWait = 1000 * 60 * 5; // 5 minutes
r.put(KEY_WAIT, new Integer(millisToWait));
// build up queue size
- LogWriterSupport.getLogWriter().info("[testPartialMessage] building up queue size...");
+ LogWriterUtils.getLogWriter().info("[testPartialMessage] building up queue size...");
final Object key = "key";
final int socketBufferSize = getSystem().getConfig().getSocketBufferSize();
final int VALUE_SIZE = socketBufferSize*3;
@@ -1365,7 +1365,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
final int partialId = count;
assertEquals(0, stats.getAsyncConflatedMsgs());
- LogWriterSupport.getLogWriter().info("[testPartialMessage] After " +
+ LogWriterUtils.getLogWriter().info("[testPartialMessage] After " +
count + " puts of size " + VALUE_SIZE +
" slowrec mode kicked in with queue size=" + stats.getAsyncQueueSize());
@@ -1394,7 +1394,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
final int[] expectedArgs = { partialId, conflateId };
// send notify to vm0
- LogWriterSupport.getLogWriter().info("[testPartialMessage] wake up vm0");
+ LogWriterUtils.getLogWriter().info("[testPartialMessage] wake up vm0");
getOtherVm().invoke(new SerializableRunnable("Wake up other vm") {
public void run() {
synchronized(doTestPartialMessage_Listener.CONTROL_LOCK) {
@@ -1404,7 +1404,7 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// wait for queue to be flushed
- LogWriterSupport.getLogWriter().info("[testPartialMessage] wait for vm0");
+ LogWriterUtils.getLogWriter().info("[testPartialMessage] wait for vm0");
getOtherVm().invoke(new SerializableRunnable("Wait for other vm") {
public void run() {
try {
@@ -1430,11 +1430,11 @@ public class SlowRecDUnitTest extends CacheTestCase {
});
// assert values on both listeners
- LogWriterSupport.getLogWriter().info("[testPartialMessage] assert callback arguments");
+ LogWriterUtils.getLogWriter().info("[testPartialMessage] assert callback arguments");
getOtherVm().invoke(new SerializableRunnable("Assert callback arguments") {
public void run() {
synchronized(doTestPartialMessage_Listener.CONTROL_LOCK) {
- LogWriterSupport.getLogWriter().info("[testPartialMessage] " +
+ LogWriterUtils.getLogWriter().info("[testPartialMessage] " +
"doTestPartialMessage_Listener.callbackArguments=" +
doTestPartialMessage_Listener.callbackArguments);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXDistributedDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXDistributedDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXDistributedDUnitTest.java
index 1e400c2..7a306f0 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXDistributedDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXDistributedDUnitTest.java
@@ -83,7 +83,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -527,7 +527,7 @@ public class TXDistributedDUnitTest extends CacheTestCase {
@Override
public Properties getDistributedSystemProperties() {
Properties p = super.getDistributedSystemProperties();
- p.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ p.put("log-level", LogWriterUtils.getDUnitLogLevel());
return p;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXOrderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXOrderDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXOrderDUnitTest.java
index d014fec..a253f09 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXOrderDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/TXOrderDUnitTest.java
@@ -52,7 +52,7 @@ import com.gemstone.gemfire.cache.util.TransactionListenerAdapter;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -187,7 +187,7 @@ public class TXOrderDUnitTest extends CacheTestCase {
af.addCacheListener(cl1);
CacheLoader cl = new CacheLoader() {
public Object load(LoaderHelper helper) throws CacheLoaderException {
- LogWriterSupport.getLogWriter().info("Loading value:"+helper.getKey()+"_value");
+ LogWriterUtils.getLogWriter().info("Loading value:"+helper.getKey()+"_value");
return helper.getKey()+"_value";
}
public void close() {
@@ -206,7 +206,7 @@ public class TXOrderDUnitTest extends CacheTestCase {
af.setScope(Scope.DISTRIBUTED_ACK);
CacheListener cl1 = new CacheListenerAdapter() {
public void afterCreate(EntryEvent e) {
- LogWriterSupport.getLogWriter().info("op:"+e.getOperation().toString());
+ LogWriterUtils.getLogWriter().info("op:"+e.getOperation().toString());
assertTrue(!e.getOperation().isLocalLoad());
}
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/DistributedSystemDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/DistributedSystemDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/DistributedSystemDUnitTest.java
index 9d9529c..fcaaa2d 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/DistributedSystemDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/DistributedSystemDUnitTest.java
@@ -45,9 +45,9 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.SocketCreator;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -258,7 +258,7 @@ public class DistributedSystemDUnitTest extends DistributedTestCase {
public void testSpecificTcpPort() throws Exception {
Properties config = new Properties();
int tcpPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- config.put("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.put("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
config.setProperty("tcp-port", String.valueOf(tcpPort));
system = (InternalDistributedSystem)DistributedSystem.connect(config);
DistributionManager dm = (DistributionManager)system.getDistributionManager();
@@ -286,10 +286,10 @@ public class DistributedSystemDUnitTest extends DistributedTestCase {
if (loopback != null) {
Properties config = new Properties();
config.put(DistributionConfig.MCAST_PORT_NAME, "0");
- String locators = InetAddress.getLocalHost().getHostName()+":"+DistributedTestSupport.getDUnitLocatorPort();
+ String locators = InetAddress.getLocalHost().getHostName()+":"+DistributedTestUtils.getDUnitLocatorPort();
config.put(DistributionConfig.LOCATORS_NAME, locators);
config.setProperty(DistributionConfig.BIND_ADDRESS_NAME, loopback.getHostAddress());
- LogWriterSupport.getLogWriter().info("attempting to connect with " + loopback +" and locators=" + locators);
+ LogWriterUtils.getLogWriter().info("attempting to connect with " + loopback +" and locators=" + locators);
try {
system = (InternalDistributedSystem)DistributedSystem.connect(config);
system.disconnect();
@@ -305,7 +305,7 @@ public class DistributedSystemDUnitTest extends DistributedTestCase {
public void testUDPPortRange() throws Exception {
Properties config = new Properties();
int unicastPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- config.put("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.put("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
// Minimum 3 ports required in range for UDP, FD_SOCK and TcpConduit.
config.setProperty(DistributionConfig.MEMBERSHIP_PORT_RANGE_NAME,
""+unicastPort+"-"+(unicastPort+2));
@@ -319,7 +319,7 @@ public class DistributedSystemDUnitTest extends DistributedTestCase {
public void testMembershipPortRangeWithExactThreeValues() throws Exception {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
config.setProperty(DistributionConfig.MEMBERSHIP_PORT_RANGE_NAME, ""
+ (DistributionConfig.DEFAULT_MEMBERSHIP_PORT_RANGE[1] - 2) + "-"
+ (DistributionConfig.DEFAULT_MEMBERSHIP_PORT_RANGE[1]));
[12/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ConfigCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ConfigCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ConfigCommandsDUnitTest.java
index fb7f41e..dca0f78 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ConfigCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ConfigCommandsDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -125,10 +125,10 @@ public class ConfigCommandsDUnitTest extends CliCommandTestBase {
RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
List<String> jvmArgs = runtimeBean.getInputArguments();
- LogWriterSupport.getLogWriter().info("#SB Actual JVM Args : ");
+ LogWriterUtils.getLogWriter().info("#SB Actual JVM Args : ");
for (String jvmArg : jvmArgs) {
- LogWriterSupport.getLogWriter().info("#SB JVM " + jvmArg);
+ LogWriterUtils.getLogWriter().info("#SB JVM " + jvmArg);
}
InternalDistributedSystem system = (InternalDistributedSystem) cache.getDistributedSystem();
@@ -142,7 +142,7 @@ public class ConfigCommandsDUnitTest extends CliCommandTestBase {
CommandResult cmdResult = executeCommand(command);
String resultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB Hiding the defaults\n" + resultStr);
+ LogWriterUtils.getLogWriter().info("#SB Hiding the defaults\n" + resultStr);
assertEquals(true, cmdResult.getStatus().equals(Status.OK));
assertEquals(true, resultStr.contains("G1"));
@@ -152,7 +152,7 @@ public class ConfigCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(command + " --" + CliStrings.DESCRIBE_CONFIG__HIDE__DEFAULTS + "=false");
resultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB No hiding of defaults\n" + resultStr);
+ LogWriterUtils.getLogWriter().info("#SB No hiding of defaults\n" + resultStr);
assertEquals(true, cmdResult.getStatus().equals(Status.OK));
assertEquals(true, resultStr.contains("is-server"));
@@ -281,8 +281,8 @@ public class ConfigCommandsDUnitTest extends CliCommandTestBase {
csb.addOption(CliStrings.ALTER_RUNTIME_CONFIG__LOG__DISK__SPACE__LIMIT, "10");
CommandResult cmdResult = executeCommand(csb.getCommandString());
String resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("Result\n");
- LogWriterSupport.getLogWriter().info(resultString);
+ LogWriterUtils.getLogWriter().info("Result\n");
+ LogWriterUtils.getLogWriter().info(resultString);
assertEquals(true, cmdResult.getStatus().equals(Status.OK));
assertEquals(LogWriterImpl.INFO_LEVEL, config.getLogLevel());
assertEquals(50, config.getLogFileSizeLimit());
@@ -320,8 +320,8 @@ public class ConfigCommandsDUnitTest extends CliCommandTestBase {
CommandStringBuilder csb = new CommandStringBuilder(CliStrings.ALTER_RUNTIME_CONFIG);
CommandResult cmdResult = executeCommand(csb.getCommandString());
String resultAsString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB Result\n");
- LogWriterSupport.getLogWriter().info(resultAsString);
+ LogWriterUtils.getLogWriter().info("#SB Result\n");
+ LogWriterUtils.getLogWriter().info(resultAsString);
assertEquals(true, cmdResult.getStatus().equals(Status.ERROR));
assertTrue(resultAsString.contains(CliStrings.ALTER_RUNTIME_CONFIG__RELEVANT__OPTION__MESSAGE));
@@ -329,8 +329,8 @@ public class ConfigCommandsDUnitTest extends CliCommandTestBase {
csb.addOption(CliStrings.ALTER_RUNTIME_CONFIG__LOG__DISK__SPACE__LIMIT, "2000000000");
cmdResult = executeCommand(csb.getCommandString());
resultAsString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB Result\n");
- LogWriterSupport.getLogWriter().info(resultAsString);
+ LogWriterUtils.getLogWriter().info("#SB Result\n");
+ LogWriterUtils.getLogWriter().info(resultAsString);
assertEquals(true, cmdResult.getStatus().equals(Status.ERROR));
}
@@ -365,8 +365,8 @@ public class ConfigCommandsDUnitTest extends CliCommandTestBase {
csb.addOption(CliStrings.ALTER_RUNTIME_CONFIG__LOG__DISK__SPACE__LIMIT, "10");
CommandResult cmdResult = executeCommand(csb.getCommandString());
String resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB Result\n");
- LogWriterSupport.getLogWriter().info(resultString);
+ LogWriterUtils.getLogWriter().info("#SB Result\n");
+ LogWriterUtils.getLogWriter().info(resultString);
assertEquals(true, cmdResult.getStatus().equals(Status.OK));
assertEquals(LogWriterImpl.INFO_LEVEL, config.getLogLevel());
assertEquals(50, config.getLogFileSizeLimit());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CreateAlterDestroyRegionCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CreateAlterDestroyRegionCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CreateAlterDestroyRegionCommandsDUnitTest.java
index 9b5aa66..9f6b141 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CreateAlterDestroyRegionCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CreateAlterDestroyRegionCommandsDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -234,7 +234,7 @@ public class CreateAlterDestroyRegionCommandsDUnitTest extends CliCommandTestBas
ObjectName queryExpON = new ObjectName(queryExp);
return !(mbeanServer.queryNames(null, queryExpON).isEmpty());
} catch (MalformedObjectNameException mone) {
- LogWriterSupport.getLogWriter().error(mone);
+ LogWriterUtils.getLogWriter().error(mone);
fail(mone.getMessage());
return false;
}
@@ -252,36 +252,36 @@ public class CreateAlterDestroyRegionCommandsDUnitTest extends CliCommandTestBas
// Test failure when region not found
String command = "destroy region --name=DOESNOTEXIST";
- LogWriterSupport.getLogWriter().info("testDestroyRegion command=" + command);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion command=" + command);
CommandResult cmdResult = executeCommand(command);
String strr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testDestroyRegion strr=" + strr);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion strr=" + strr);
assertTrue(stringContainsLine(strr, "Could not find.*\"DOESNOTEXIST\".*"));
assertEquals(Result.Status.ERROR, cmdResult.getStatus());
// Test unable to destroy with co-location
command = "destroy region --name=/Customer";
- LogWriterSupport.getLogWriter().info("testDestroyRegion command=" + command);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion command=" + command);
cmdResult = executeCommand(command);
strr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testDestroyRegion strr=" + strr);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion strr=" + strr);
assertEquals(Result.Status.ERROR, cmdResult.getStatus());
// Test success
command = "destroy region --name=/Order";
- LogWriterSupport.getLogWriter().info("testDestroyRegion command=" + command);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion command=" + command);
cmdResult = executeCommand(command);
strr = commandResultToString(cmdResult);
assertTrue(stringContainsLine(strr, ".*Order.*destroyed successfully.*"));
- LogWriterSupport.getLogWriter().info("testDestroyRegion strr=" + strr);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion strr=" + strr);
assertEquals(Result.Status.OK, cmdResult.getStatus());
command = "destroy region --name=/Customer";
- LogWriterSupport.getLogWriter().info("testDestroyRegion command=" + command);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion command=" + command);
cmdResult = executeCommand(command);
strr = commandResultToString(cmdResult);
assertTrue(stringContainsLine(strr, ".*Customer.*destroyed successfully.*"));
- LogWriterSupport.getLogWriter().info("testDestroyRegion strr=" + strr);
+ LogWriterUtils.getLogWriter().info("testDestroyRegion strr=" + strr);
assertEquals(Result.Status.OK, cmdResult.getStatus());
}
@@ -290,19 +290,19 @@ public class CreateAlterDestroyRegionCommandsDUnitTest extends CliCommandTestBas
createDefaultSetup(null);
String command = CliStrings.CREATE_REGION + " --" + CliStrings.CREATE_REGION__REGION + "=" + this.region46391 + " --" + CliStrings.CREATE_REGION__REGIONSHORTCUT + "=REPLICATE";
- LogWriterSupport.getLogWriter().info("testCreateRegion46391 create region command=" + command);
+ LogWriterUtils.getLogWriter().info("testCreateRegion46391 create region command=" + command);
CommandResult cmdResult = executeCommand(command);
assertEquals(Result.Status.OK, cmdResult.getStatus());
command = CliStrings.PUT + " --" + CliStrings.PUT__KEY + "=k1" + " --" + CliStrings.PUT__VALUE + "=k1" + " --" + CliStrings.PUT__REGIONNAME + "=" + this.region46391;
- LogWriterSupport.getLogWriter().info("testCreateRegion46391 put command=" + command);
+ LogWriterUtils.getLogWriter().info("testCreateRegion46391 put command=" + command);
CommandResult cmdResult2 = executeCommand(command);
assertEquals(Result.Status.OK, cmdResult2.getStatus());
- LogWriterSupport.getLogWriter().info("testCreateRegion46391 cmdResult2=" + commandResultToString(cmdResult2));
+ LogWriterUtils.getLogWriter().info("testCreateRegion46391 cmdResult2=" + commandResultToString(cmdResult2));
String str1 = "Result : true";
String str2 = "Key : k1";
String str3 = "Key Class : java.lang.String";
@@ -815,7 +815,7 @@ public class CreateAlterDestroyRegionCommandsDUnitTest extends CliCommandTestBas
ObjectName queryExpON = new ObjectName(queryExp);
return !(mbeanServer.queryNames(null, queryExpON).isEmpty());
} catch (MalformedObjectNameException mone) {
- LogWriterSupport.getLogWriter().error(mone);
+ LogWriterUtils.getLogWriter().error(mone);
fail(mone.getMessage());
return false;
}
@@ -1001,7 +1001,7 @@ public class CreateAlterDestroyRegionCommandsDUnitTest extends CliCommandTestBas
ObjectName queryExpON = new ObjectName(queryExp);
return !(mbeanServer.queryNames(null, queryExpON).isEmpty());
} catch (MalformedObjectNameException mone) {
- LogWriterSupport.getLogWriter().error(mone);
+ LogWriterUtils.getLogWriter().error(mone);
fail(mone.getMessage());
return false;
}
@@ -1034,7 +1034,7 @@ public class CreateAlterDestroyRegionCommandsDUnitTest extends CliCommandTestBas
commandStringBuilder = new CommandStringBuilder(CliStrings.DESTROY_REGION);
commandStringBuilder.addOption(CliStrings.DESTROY_REGION__REGION, regionName);
cmdResult = executeCommand(commandStringBuilder.toString());
- LogWriterSupport.getLogWriter().info("#SB" + commandResultToString(cmdResult));
+ LogWriterUtils.getLogWriter().info("#SB" + commandResultToString(cmdResult));
assertEquals(Result.Status.OK, cmdResult.getStatus());
// Make sure the region was removed from the shared config
@@ -1087,7 +1087,7 @@ public class CreateAlterDestroyRegionCommandsDUnitTest extends CliCommandTestBas
executeCommand("undeploy --jar=" + fileToDelete.getName());
}
} catch (IOException e) {
- LogWriterSupport.getLogWriter().error("Unable to delete file", e);
+ LogWriterUtils.getLogWriter().error("Unable to delete file", e);
}
}
this.filesToBeDeleted.clear();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DiskStoreCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DiskStoreCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DiskStoreCommandsDUnitTest.java
index e9a3de8..826f128 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DiskStoreCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DiskStoreCommandsDUnitTest.java
@@ -48,7 +48,7 @@ import com.gemstone.gemfire.management.internal.cli.shell.Gfsh;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -372,16 +372,16 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
}
});
String command = "validate offline-disk-store --name=" + diskStoreName1 + " --disk-dirs=" + diskStoreDir.getAbsolutePath();
- LogWriterSupport.getLogWriter().info("testValidateDiskStore command: " + command);
+ LogWriterUtils.getLogWriter().info("testValidateDiskStore command: " + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testValidateDiskStore cmdResult is stringResult " + stringResult);
+ LogWriterUtils.getLogWriter().info("testValidateDiskStore cmdResult is stringResult " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
assertTrue(stringResult.contains("Total number of region entries in this disk store is"));
} else {
- LogWriterSupport.getLogWriter().info("testValidateDiskStore cmdResult is null");
+ LogWriterUtils.getLogWriter().info("testValidateDiskStore cmdResult is null");
fail("Did not get CommandResult in testValidateDiskStore");
}
}
@@ -426,7 +426,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
}
});
String command = "export offline-disk-store --name=" + diskStoreName1 + " --disk-dirs=" + diskStoreDir.getAbsolutePath() + " --dir=" + exportDir;
- LogWriterSupport.getLogWriter().info("testExportDiskStore command" + command);
+ LogWriterUtils.getLogWriter().info("testExportDiskStore command" + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
assertEquals(Result.Status.OK, cmdResult.getStatus());
@@ -435,7 +435,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
SnapshotTestUtil.checkSnapshotEntries(exportDir, entries, diskStoreName1, region2);
} else {
- LogWriterSupport.getLogWriter().info("testExportOfflineDiskStore cmdResult is null");
+ LogWriterUtils.getLogWriter().info("testExportOfflineDiskStore cmdResult is null");
fail("Did not get CommandResult in testExportOfflineDiskStore");
}
}
@@ -667,7 +667,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
CommandResult cmdResult = executeCommand(commandString);
String resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB command output : \n" + resultString);
+ LogWriterUtils.getLogWriter().info("#SB command output : \n" + resultString);
assertEquals(true, Result.Status.OK.equals(cmdResult.getStatus()));
assertEquals(true, resultString.contains("concurrencyLevel=5"));
assertEquals(true, resultString.contains("lruAction=local-destroy"));
@@ -698,7 +698,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(commandString);
resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("command output : \n" + resultString);
+ LogWriterUtils.getLogWriter().info("command output : \n" + resultString);
assertEquals(true, Result.Status.OK.equals(cmdResult.getStatus()));
Object postDestroyValue = vm1.invoke(new SerializableCallable() {
@@ -725,7 +725,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(commandString);
resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("Alter DiskStore with wrong remove option : \n" + resultString);
+ LogWriterUtils.getLogWriter().info("Alter DiskStore with wrong remove option : \n" + resultString);
assertEquals(true, Result.Status.ERROR.equals(cmdResult.getStatus()));
filesToBeDeleted.add(diskDirName);
@@ -792,7 +792,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
CommandResult cmdResult = executeCommand(commandString);
String resultAsString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("Result from full backup : \n" + resultAsString);
+ LogWriterUtils.getLogWriter().info("Result from full backup : \n" + resultAsString);
assertEquals(Result.Status.OK, cmdResult.getStatus());
assertEquals(true, resultAsString.contains("Manager"));
assertEquals(true, resultAsString.contains(vm1Name));
@@ -822,7 +822,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(csb.toString());
resultAsString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("Result from incremental backup : \n" + resultAsString);
+ LogWriterUtils.getLogWriter().info("Result from incremental backup : \n" + resultAsString);
assertEquals(Result.Status.OK, cmdResult.getStatus());
assertEquals(true, resultAsString.contains("Manager"));
@@ -1149,7 +1149,7 @@ public class DiskStoreCommandsDUnitTest extends CliCommandTestBase {
try {
FileUtil.delete(new File(path));
} catch (IOException e) {
- LogWriterSupport.getLogWriter().error("Unable to delete file", e);
+ LogWriterUtils.getLogWriter().error("Unable to delete file", e);
}
}
this.filesToBeDeleted.clear();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/FunctionCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/FunctionCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/FunctionCommandsDUnitTest.java
index 494890e..7378ada 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/FunctionCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/FunctionCommandsDUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.management.internal.cli.json.GfJsonException;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.result.TabularResultData;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -120,11 +120,11 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
e.printStackTrace();
}
String command = "execute function --id=" + function.getId() + " --region=" + "/" + "RegionOne";
- LogWriterSupport.getLogWriter().info("testExecuteFunctionWithNoRegionOnManager command : " + command);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionWithNoRegionOnManager command : " + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
String strCmdResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testExecuteFunctionWithNoRegionOnManager stringResult : " + strCmdResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionWithNoRegionOnManager stringResult : " + strCmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
assertTrue(strCmdResult.contains("Execution summary"));
} else {
@@ -152,13 +152,13 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
});
String command = "execute function --id=" + function.getId() + " --region=" + REGION_NAME;
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnRegion command=" + command);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnRegion command=" + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
assertEquals(Result.Status.OK, cmdResult.getStatus());
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnRegion cmdResult=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnRegion cmdResult=" + cmdResult);
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnRegion stringResult=" + stringResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnRegion stringResult=" + stringResult);
assert (stringResult.contains("Execution summary"));
} else {
fail("testExecuteFunctionOnRegion did not return CommandResult");
@@ -195,7 +195,7 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
if (bean == null) {
return false;
} else {
- LogWriterSupport.getLogWriter().info("Probing for checkRegionMBeans testExecuteFunctionOnRegionBug51480 finished");
+ LogWriterUtils.getLogWriter().info("Probing for checkRegionMBeans testExecuteFunctionOnRegionBug51480 finished");
return true;
}
}
@@ -228,13 +228,13 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
String command = "execute function --id=" + function.getId() + " --region=" + REGION_ONE;
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnRegionBug51480 command=" + command);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnRegionBug51480 command=" + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnRegionBug51480 cmdResult=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnRegionBug51480 cmdResult=" + cmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnRegionBug51480 stringResult=" + stringResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnRegionBug51480 stringResult=" + stringResult);
assert (stringResult.contains("Execution summary"));
} else {
fail("testExecuteFunctionOnRegionBug51480 did not return CommandResult");
@@ -263,12 +263,12 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
});
String command = "execute function --id=" + function.getId() + " --member=" + vm1MemberId;
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMember command=" + command);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMember command=" + command);
CommandResult cmdResult = executeCommand(command);
assertEquals(Result.Status.OK, cmdResult.getStatus());
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMember stringResult:" + stringResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMember stringResult:" + stringResult);
assertTrue(stringResult.contains("Execution summary"));
}
@@ -292,13 +292,13 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
}
});
String command = "execute function --id=" + function.getId();
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMembers command=" + command);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMembers command=" + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
assertEquals(Result.Status.OK, cmdResult.getStatus());
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMembers cmdResult:" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMembers cmdResult:" + cmdResult);
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMembers stringResult:" + stringResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMembers stringResult:" + stringResult);
assertTrue(stringResult.contains("Execution summary"));
} else {
fail("testExecuteFunctionOnMembers did not return CommandResult");
@@ -326,13 +326,13 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
String command = "execute function --id=" + function.getId() + " --arguments=arg1,arg2";
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMembersWithArgs command=" + command);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMembersWithArgs command=" + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
assertEquals(Result.Status.OK, cmdResult.getStatus());
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMembersWithArgs cmdResult:" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMembersWithArgs cmdResult:" + cmdResult);
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnMembersWithArgs stringResult:" + stringResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnMembersWithArgs stringResult:" + stringResult);
assertTrue(stringResult.contains("Execution summary"));
assertTrue(stringResult.contains("arg1"));
} else {
@@ -388,13 +388,13 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
});
String command = "execute function --id=" + TestFunction.TEST_FUNCTION1 + " --groups=Group1,Group2";
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnGroups command=" + command);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnGroups command=" + command);
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnGroups cmdResult=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnGroups cmdResult=" + cmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
TabularResultData resultData = (TabularResultData) cmdResult.getResultData();
List<String> members = resultData.retrieveAllValues("Member ID/Name");
- LogWriterSupport.getLogWriter().info("testExecuteFunctionOnGroups members=" + members);
+ LogWriterUtils.getLogWriter().info("testExecuteFunctionOnGroups members=" + members);
assertTrue(members.size() == 2 && members.contains(vm1id) && members.contains(vm2id));
}
@@ -406,12 +406,12 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
final VM vm1 = Host.getHost(0).getVM(1);
final String vm1MemberId = (String) vm1.invoke(FunctionCommandsDUnitTest.class, "getMemberId");
String command = "destroy function --id=" + function.getId() + " --member=" + vm1MemberId;
- LogWriterSupport.getLogWriter().info("testDestroyOnMember command=" + command);
+ LogWriterUtils.getLogWriter().info("testDestroyOnMember command=" + command);
CommandResult cmdResult = executeCommand(command);
if (cmdResult != null) {
String strCmdResult = commandResultToString(cmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
- LogWriterSupport.getLogWriter().info("testDestroyOnMember strCmdResult=" + strCmdResult);
+ LogWriterUtils.getLogWriter().info("testDestroyOnMember strCmdResult=" + strCmdResult);
assertTrue(strCmdResult.contains("Destroyed TestFunction1 Successfully"));
} else {
fail("testDestroyOnMember failed as did not get CommandResult");
@@ -464,14 +464,14 @@ public class FunctionCommandsDUnitTest extends CliCommandTestBase {
});
String command = "destroy function --id=" + TestFunction.TEST_FUNCTION1 + " --groups=Group1,Group2";
- LogWriterSupport.getLogWriter().info("testDestroyOnGroups command=" + command);
+ LogWriterUtils.getLogWriter().info("testDestroyOnGroups command=" + command);
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testDestroyOnGroups cmdResult=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testDestroyOnGroups cmdResult=" + cmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
String content = null;
try {
content = cmdResult.getContent().get("message").toString();
- LogWriterSupport.getLogWriter().info("testDestroyOnGroups content = " + content);
+ LogWriterUtils.getLogWriter().info("testDestroyOnGroups content = " + content);
} catch (GfJsonException e) {
fail("testDestroyOnGroups exception=" + e);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GemfireDataCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GemfireDataCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GemfireDataCommandsDUnitTest.java
index 2a85e40..d6719d1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GemfireDataCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GemfireDataCommandsDUnitTest.java
@@ -57,7 +57,7 @@ import com.gemstone.gemfire.management.internal.cli.result.TabularResultData;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -144,29 +144,29 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
RegionFactory regionFactory = cache.createRegionFactory(RegionShortcut.REPLICATE);
Region dataRegion = regionFactory.create(DATA_REGION_NAME);
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
dataRegion = dataRegion.createSubregion(DATA_REGION_NAME_CHILD_1, dataRegion.getAttributes());
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
dataRegion = dataRegion.createSubregion(DATA_REGION_NAME_CHILD_1_2, dataRegion.getAttributes());
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
dataRegion = regionFactory.create(DATA_REGION_NAME_VM1);
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
PartitionAttributes partitionAttrs = new PartitionAttributesFactory().setRedundantCopies(2).create();
RegionFactory<Object, Object> partitionRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
partitionRegionFactory.setPartitionAttributes(partitionAttrs);
Region dataParRegion = partitionRegionFactory.create(DATA_PAR_REGION_NAME);
assertNotNull(dataParRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataParRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataParRegion);
dataParRegion = partitionRegionFactory.create(DATA_PAR_REGION_NAME_VM1);
assertNotNull(dataParRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataParRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataParRegion);
}
});
@@ -177,19 +177,19 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
RegionFactory regionFactory = cache.createRegionFactory(RegionShortcut.REPLICATE);
Region dataRegion = regionFactory.create(DATA_REGION_NAME);
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
dataRegion = dataRegion.createSubregion(DATA_REGION_NAME_CHILD_1, dataRegion.getAttributes());
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
dataRegion = dataRegion.createSubregion(DATA_REGION_NAME_CHILD_1_2, dataRegion.getAttributes());
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
dataRegion = regionFactory.create(DATA_REGION_NAME_VM2);
assertNotNull(dataRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataRegion);
PartitionAttributes partitionAttrs = new PartitionAttributesFactory().setRedundantCopies(2).create();
@@ -197,18 +197,18 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
partitionRegionFactory.setPartitionAttributes(partitionAttrs);
Region dataParRegion = partitionRegionFactory.create(DATA_PAR_REGION_NAME);
assertNotNull(dataParRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataParRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataParRegion);
dataParRegion = partitionRegionFactory.create(DATA_PAR_REGION_NAME_VM2);
assertNotNull(dataParRegion);
- LogWriterSupport.getLogWriter().info("Created Region " + dataParRegion);
+ LogWriterUtils.getLogWriter().info("Created Region " + dataParRegion);
}
});
final String vm1MemberId = (String) vm1.invoke(GemfireDataCommandsDUnitTest.class, "getMemberId");
final String vm2MemberId = (String) vm2.invoke(GemfireDataCommandsDUnitTest.class, "getMemberId");
- LogWriterSupport.getLogWriter().info("Vm1 ID : " + vm1MemberId);
- LogWriterSupport.getLogWriter().info("Vm2 ID : " + vm2MemberId);
+ LogWriterUtils.getLogWriter().info("Vm1 ID : " + vm1MemberId);
+ LogWriterUtils.getLogWriter().info("Vm2 ID : " + vm2MemberId);
final VM manager = Host.getHost(0).getVM(0);
@@ -224,10 +224,10 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
ManagerMXBean bean1 = service.getManagerMXBean();
DistributedRegionMXBean bean2 = service.getDistributedRegionMXBean(DATA_REGION_NAME_PATH);
if (bean1 == null) {
- LogWriterSupport.getLogWriter().info("Still probing for ManagerMBean");
+ LogWriterUtils.getLogWriter().info("Still probing for ManagerMBean");
return false;
} else {
- LogWriterSupport.getLogWriter().info("Still probing for DistributedRegionMXBean=" + bean2);
+ LogWriterUtils.getLogWriter().info("Still probing for DistributedRegionMXBean=" + bean2);
return (bean2 != null);
}
}
@@ -268,7 +268,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
}
if (!flag) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Still probing for regionMbeans " + DATA_REGION_NAME_PATH + "=" + beans[0] + " " + DATA_REGION_NAME_VM1_PATH + "=" + beans[1] + " " + DATA_REGION_NAME_VM2_PATH + "=" + beans[2] + " " + DATA_PAR_REGION_NAME_PATH + "=" + beans[3] + " " + DATA_PAR_REGION_NAME_VM1_PATH + "=" + beans[4] + " " + DATA_PAR_REGION_NAME_VM2_PATH + "=" + beans[5] + " "
//+ DATA_REGION_NAME_CHILD_1_PATH
// +"="+ beans[6] + " " + DATA_REGION_NAME_CHILD_1_2_PATH
@@ -276,7 +276,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
);
return false;
} else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Probing complete for regionMbeans " + DATA_REGION_NAME_PATH + "=" + beans[0] + " " + DATA_REGION_NAME_VM1_PATH + "=" + beans[1] + " " + DATA_REGION_NAME_VM2_PATH + "=" + beans[2] + " " + DATA_PAR_REGION_NAME_PATH + "=" + beans[3] + " " + DATA_PAR_REGION_NAME_VM1_PATH + "=" + beans[4] + " " + DATA_PAR_REGION_NAME_VM2_PATH + "=" + beans[5] + " "
//+ DATA_REGION_NAME_CHILD_1_PATH
// +"="+ beans[6] + " " + DATA_REGION_NAME_CHILD_1_2_PATH
@@ -306,7 +306,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
bean = service.getDistributedRegionMXBean(region);
assertNotNull(bean);
String[] membersName = bean.getMembers();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Members Array for region " + region + " : " + StringUtils.objectToString(membersName, true, 10));
if (bean.getMemberCount() < 1) fail(
"Even after waiting mbean reports number of member hosting region " + DATA_REGION_NAME_VM1_PATH + " is less than one");
@@ -371,7 +371,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
query = query.replace("?" + i, r);
i++;
}
- LogWriterSupport.getLogWriter().info("Checking members for query : " + query);
+ LogWriterUtils.getLogWriter().info("Checking members for query : " + query);
QCompiler compiler = new QCompiler();
Set<String> regionsInQuery = null;
try {
@@ -379,11 +379,11 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
Set regionSet = new HashSet();
compiledQuery.getRegionsInQuery(regionSet, null);//GFSH ENV VARIBLES
regionsInQuery = Collections.unmodifiableSet(regionSet);
- LogWriterSupport.getLogWriter().info("Region in query : " + regionsInQuery);
+ LogWriterUtils.getLogWriter().info("Region in query : " + regionsInQuery);
if (regionsInQuery.size() > 0) {
Set<DistributedMember> members = DataCommands.getQueryRegionsAssociatedMembers(regionsInQuery, cache,
returnAll);
- LogWriterSupport.getLogWriter().info("Members for Region in query : " + members);
+ LogWriterUtils.getLogWriter().info("Members for Region in query : " + members);
if (expectedMembers != -1) {
assertNotNull(members);
assertEquals(expectedMembers, members.size());
@@ -455,7 +455,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
MemberMXBean member = service.getMemberMXBean();
String cmdResult = member.processCommand(query);
assertNotNull(cmdResult);
- LogWriterSupport.getLogWriter().info("Text Command Output : " + cmdResult);
+ LogWriterUtils.getLogWriter().info("Text Command Output : " + cmdResult);
}
});
}
@@ -552,15 +552,15 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
Double doubleKey = Double.valueOf("12432.235425");
Double doubleValue = Double.valueOf("111111.111111");
- LogWriterSupport.getLogWriter().info("Testing Byte Wrappers");
+ LogWriterUtils.getLogWriter().info("Testing Byte Wrappers");
testGetPutLocateEntryFromShellAndGemfire(byteKey, byteValue, Byte.class, true, true);
- LogWriterSupport.getLogWriter().info("Testing Short Wrappers");
+ LogWriterUtils.getLogWriter().info("Testing Short Wrappers");
testGetPutLocateEntryFromShellAndGemfire(shortKey, shortValue, Short.class, true, true);
- LogWriterSupport.getLogWriter().info("Testing Integer Wrappers");
+ LogWriterUtils.getLogWriter().info("Testing Integer Wrappers");
testGetPutLocateEntryFromShellAndGemfire(integerKey, integerValue, Integer.class, true, true);
- LogWriterSupport.getLogWriter().info("Testing Float Wrappers");
+ LogWriterUtils.getLogWriter().info("Testing Float Wrappers");
testGetPutLocateEntryFromShellAndGemfire(floatKey, flaotValue, Float.class, true, true);
- LogWriterSupport.getLogWriter().info("Testing Double Wrappers");
+ LogWriterUtils.getLogWriter().info("Testing Double Wrappers");
testGetPutLocateEntryFromShellAndGemfire(doubleKey, doubleValue, Double.class, true, true);
}
@@ -762,7 +762,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
try {
for (String col : expectedCols) {
boolean found = false;
- LogWriterSupport.getLogWriter().info("Validating column " + col);
+ LogWriterUtils.getLogWriter().info("Validating column " + col);
for (int i = 0; i < array.size(); i++) {
String header = (String) array.get(i);
if (col.equals(header)) found = true;
@@ -783,7 +783,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
try {
for (String col : expectedCols) {
boolean found = false;
- LogWriterSupport.getLogWriter().info("Validating column " + col);
+ LogWriterUtils.getLogWriter().info("Validating column " + col);
for (int i = 0; i < array.size(); i++) {
String header = (String) array.get(i);
if (col.equals(header)) found = true;
@@ -817,7 +817,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
assertEquals(cols.length, array.size());
for (String col : cols) {
boolean found = false;
- LogWriterSupport.getLogWriter().info("Validating column " + col);
+ LogWriterUtils.getLogWriter().info("Validating column " + col);
for (int i = 0; i < array.size(); i++) {
String header = (String) array.get(i);
if (col.equals(header)) found = true;
@@ -1279,7 +1279,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
String valueJson = valueTemplate.replaceAll("\\?1", population);
valueJson = valueJson.replaceAll("\\?2", area);
valueJson = valueJson.replaceAll("\\?", keyString);
- LogWriterSupport.getLogWriter().info("Getting key with json key : " + keyJson);
+ LogWriterUtils.getLogWriter().info("Getting key with json key : " + keyJson);
command = command + " " + "--key=" + keyJson + " --region=" + DATA_REGION_NAME_PATH + " --key-class=" + Key1.class.getCanonicalName();
command = command + " --value-class=" + Value2.class.getCanonicalName();
CommandResult cmdResult = executeCommand(command);
@@ -1308,7 +1308,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
String valueJson = valueTemplate.replaceAll("\\?1", population);
valueJson = valueJson.replaceAll("\\?2", area);
valueJson = valueJson.replaceAll("\\?", keyString);
- LogWriterSupport.getLogWriter().info("Getting key with json key : " + keyJson);
+ LogWriterUtils.getLogWriter().info("Getting key with json key : " + keyJson);
command = command + " " + "--key=" + keyJson + " --region=" + DATA_REGION_NAME_PATH + " --key-class=" + Key1.class.getCanonicalName();
command = command + " --value-class=" + Value2.class.getCanonicalName();
CommandResult cmdResult = executeCommand(command);
@@ -1345,8 +1345,8 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
String valueJson = valueTemplate.replaceAll("\\?1", population);
valueJson = valueJson.replaceAll("\\?2", area);
valueJson = valueJson.replaceAll("\\?", keyString);
- LogWriterSupport.getLogWriter().info("Putting key with json key : " + keyJson);
- LogWriterSupport.getLogWriter().info("Putting key with json valye : " + valueJson);
+ LogWriterUtils.getLogWriter().info("Putting key with json key : " + keyJson);
+ LogWriterUtils.getLogWriter().info("Putting key with json valye : " + valueJson);
command = command + " " + "--key=" + keyJson + " --value=" + valueJson + " --region=" + DATA_REGION_NAME_PATH;
command = command + " --key-class=" + Key1.class.getCanonicalName() + " --value-class=" + Value2.class.getCanonicalName();
;
@@ -1374,8 +1374,8 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
valueJson = valueJson.replaceAll("\\?set", set);
valueJson = valueJson.replaceAll("\\?map", map);
- LogWriterSupport.getLogWriter().info("Putting key with json key : " + keyJson);
- LogWriterSupport.getLogWriter().info("Putting key with json valye : " + valueJson);
+ LogWriterUtils.getLogWriter().info("Putting key with json key : " + keyJson);
+ LogWriterUtils.getLogWriter().info("Putting key with json valye : " + valueJson);
command = command + " " + "--key=" + keyJson + " --value=" + valueJson + " --region=" + DATA_REGION_NAME_PATH;
command = command + " --key-class=" + Key1.class.getCanonicalName() + " --value-class=" + Car.class.getCanonicalName();
;
@@ -1485,7 +1485,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
String command = "remove";
String keyString = keyPrefix + i;
String keyJson = keyTemplate.replaceAll("\\?", keyString);
- LogWriterSupport.getLogWriter().info("Removing key with json key : " + keyJson);
+ LogWriterUtils.getLogWriter().info("Removing key with json key : " + keyJson);
command = command + " " + "--key=" + keyJson + " --region=" + DATA_REGION_NAME_PATH + " --key-class=" + Key1.class.getCanonicalName();
CommandResult cmdResult = executeCommand(command);
printCommandOutput(cmdResult);
@@ -1566,8 +1566,8 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
CommandResult cmdResult = executeCommand(commandString);
String resultAsString = commandResultToString(cmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
- LogWriterSupport.getLogWriter().info("Command Output");
- LogWriterSupport.getLogWriter().info(resultAsString);
+ LogWriterUtils.getLogWriter().info("Command Output");
+ LogWriterUtils.getLogWriter().info(resultAsString);
vm1.invoke(new SerializableRunnable() {
public void run() {
@@ -1590,8 +1590,8 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(commandString);
resultAsString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("Result of import data");
- LogWriterSupport.getLogWriter().info(resultAsString);
+ LogWriterUtils.getLogWriter().info("Result of import data");
+ LogWriterUtils.getLogWriter().info(resultAsString);
assertEquals(Result.Status.OK, cmdResult.getStatus());
/**
@@ -1616,8 +1616,8 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(commandString);
resultAsString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("Result of import data with wrong region name");
- LogWriterSupport.getLogWriter().info(resultAsString);
+ LogWriterUtils.getLogWriter().info("Result of import data with wrong region name");
+ LogWriterUtils.getLogWriter().info(resultAsString);
assertEquals(Result.Status.ERROR, cmdResult.getStatus());
csb = new CommandStringBuilder(CliStrings.IMPORT_DATA);
@@ -1628,8 +1628,8 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(commandString);
resultAsString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("Result of import data with wrong file");
- LogWriterSupport.getLogWriter().info(resultAsString);
+ LogWriterUtils.getLogWriter().info("Result of import data with wrong file");
+ LogWriterUtils.getLogWriter().info(resultAsString);
assertEquals(Result.Status.ERROR, cmdResult.getStatus());
} finally {
@@ -1689,7 +1689,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
final DistributedRegionMXBean bean = service.getDistributedRegionMXBean(
Region.SEPARATOR + REBALANCE_REGION_NAME);
if (bean == null) {
- LogWriterSupport.getLogWriter().info("Still probing for checkRegionMBeans ManagerMBean");
+ LogWriterUtils.getLogWriter().info("Still probing for checkRegionMBeans ManagerMBean");
return false;
} else {
// verify that bean is proper before executing tests
@@ -1719,13 +1719,13 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
//check if DistributedRegionMXBean is available so that command will not fail
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForTimeOut verified Mbean and executin command");
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForTimeOut verified Mbean and executin command");
String command = "rebalance --time-out=1";
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForTimeOut just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForTimeOut just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForTimeOut stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForTimeOut stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceCommandForTimeOut failed as did not get CommandResult");
@@ -1739,16 +1739,16 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForTimeOutForRegion verified Mbean and executin command");
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForTimeOutForRegion verified Mbean and executin command");
String command = "rebalance --time-out=1 --include-region=" + "/" + REBALANCE_REGION_NAME;
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForTimeOutForRegion just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForTimeOutForRegion just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForTimeOutForRegion stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForTimeOutForRegion stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceCommandForTimeOut failed as did not get CommandResult");
@@ -1762,13 +1762,13 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForSimulate verified Mbean and executin command");
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForSimulate verified Mbean and executin command");
String command = "rebalance --simulate=true --include-region=" + "/" + REBALANCE_REGION_NAME;
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForSimulate just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForSimulate just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForSimulate stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForSimulate stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceCommandForSimulate failed as did not get CommandResult");
@@ -1782,16 +1782,16 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForSimulateWithNoMember verified Mbean and executin command");
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForSimulateWithNoMember verified Mbean and executin command");
String command = "rebalance --simulate=true";
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForSimulateWithNoMember just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForSimulateWithNoMember just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testRebalanceCommandForSimulateWithNoMember stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceCommandForSimulateWithNoMember stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceCommandForSimulateWithNoMember failed as did not get CommandResult");
@@ -1805,13 +1805,13 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
//check if DistributedRegionMXBean is available so that command will not fail
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testRebalanceForIncludeRegionFunction verified Mbean and executin command");
+ LogWriterUtils.getLogWriter().info("testRebalanceForIncludeRegionFunction verified Mbean and executin command");
String command = "rebalance --include-region=" + "/" + REBALANCE_REGION_NAME + ",/" + REBALANCE_REGION2_NAME;
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testRebalanceForIncludeRegionFunction just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceForIncludeRegionFunction just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testRebalanceForIncludeRegionFunction stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceForIncludeRegionFunction stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceForIncludeRegionFunction failed as did not get CommandResult");
@@ -1824,17 +1824,17 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testSimulateForEntireDS verified Mbean and executin command");
+ LogWriterUtils.getLogWriter().info("testSimulateForEntireDS verified Mbean and executin command");
String command = "rebalance --simulate=true";
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testSimulateForEntireDS just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testSimulateForEntireDS just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testSimulateForEntireDS stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testSimulateForEntireDS stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceForIncludeRegionFunction failed as did not get CommandResult");
@@ -1846,13 +1846,13 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
//check if DistributedRegionMXBean is available so that command will not fail
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testRebalanceForEntireDS verified Mbean and executin command");
+ LogWriterUtils.getLogWriter().info("testRebalanceForEntireDS verified Mbean and executin command");
String command = "rebalance";
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testRebalanceForEntireDS just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceForEntireDS just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testRebalanceForEntireDS stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceForEntireDS stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceForIncludeRegionFunction failed as did not get CommandResult");
@@ -1903,14 +1903,14 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
private static void printCommandOutput(CommandResult cmdResult) {
assertNotNull(cmdResult);
- LogWriterSupport.getLogWriter().info("Command Output : ");
+ LogWriterUtils.getLogWriter().info("Command Output : ");
StringBuilder sb = new StringBuilder();
cmdResult.resetToFirstLine();
while (cmdResult.hasNextLine()) {
sb.append(cmdResult.nextLine()).append(DataCommandRequest.NEW_LINE);
}
- LogWriterSupport.getLogWriter().info(sb.toString());
- LogWriterSupport.getLogWriter().info("");
+ LogWriterUtils.getLogWriter().info(sb.toString());
+ LogWriterUtils.getLogWriter().info("");
}
public static class Value1WithValue2 extends Value1 {
@@ -1937,15 +1937,15 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
final VM manager = Host.getHost(0).getVM(0);
manager.invoke(checkRegionMBeans);
- LogWriterSupport.getLogWriter().info("testRebalanceForExcludeRegionFunction verified Mbean and executing command");
+ LogWriterUtils.getLogWriter().info("testRebalanceForExcludeRegionFunction verified Mbean and executing command");
String command = "rebalance --exclude-region=" + "/" + REBALANCE_REGION2_NAME;
- LogWriterSupport.getLogWriter().info("testRebalanceForExcludeRegionFunction command : " + command);
+ LogWriterUtils.getLogWriter().info("testRebalanceForExcludeRegionFunction command : " + command);
CommandResult cmdResult = executeCommand(command);
- LogWriterSupport.getLogWriter().info("testRebalanceForExcludeRegionFunction just after executing " + cmdResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceForExcludeRegionFunction just after executing " + cmdResult);
if (cmdResult != null) {
String stringResult = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testRebalanceForExcludeRegionFunction stringResult : " + stringResult);
+ LogWriterUtils.getLogWriter().info("testRebalanceForExcludeRegionFunction stringResult : " + stringResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testRebalanceForIncludeRegionFunction failed as did not get CommandResult");
@@ -1968,19 +1968,19 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
ManagerMXBean bean1 = service.getManagerMXBean();
DistributedRegionMXBean bean2 = service.getDistributedRegionMXBean(regionName);
if (bean1 == null) {
- LogWriterSupport.getLogWriter().info("waitForListClientMbean Still probing for ManagerMBean");
+ LogWriterUtils.getLogWriter().info("waitForListClientMbean Still probing for ManagerMBean");
return false;
} else {
- LogWriterSupport.getLogWriter().info("waitForListClientMbean Still probing for DistributedRegionMXBean=" + bean2);
+ LogWriterUtils.getLogWriter().info("waitForListClientMbean Still probing for DistributedRegionMXBean=" + bean2);
if (bean2 == null) {
bean2 = service.getDistributedRegionMXBean(Region.SEPARATOR + regionName);
}
if (bean2 == null) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"waitForListClientMbean Still probing for DistributedRegionMXBean with separator = " + bean2);
return false;
} else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"waitForListClientMbean Still probing for DistributedRegionMXBean with separator Not null " + bean2.getMembers().length);
if (bean2.getMembers().length > 1) {
return true;
@@ -2033,7 +2033,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
}
});
- LogWriterSupport.getLogWriter().info("testRegionsViaMbeanAndFunctions memSizeFromMbean= " + memSizeFromMbean);
+ LogWriterUtils.getLogWriter().info("testRegionsViaMbeanAndFunctions memSizeFromMbean= " + memSizeFromMbean);
String memSizeFromFunctionCall = (String) manager.invoke(new SerializableCallable() {
public Object call() {
@@ -2043,7 +2043,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
}
});
- LogWriterSupport.getLogWriter().info("testRegionsViaMbeanAndFunctions memSizeFromFunctionCall= " + memSizeFromFunctionCall);
+ LogWriterUtils.getLogWriter().info("testRegionsViaMbeanAndFunctions memSizeFromFunctionCall= " + memSizeFromFunctionCall);
assertTrue(memSizeFromFunctionCall.equals(memSizeFromMbean));
}
@@ -2072,7 +2072,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
}
});
- LogWriterSupport.getLogWriter().info("testRegionsViaMbeanAndFunctionsForPartRgn memSizeFromMbean= " + memSizeFromMbean);
+ LogWriterUtils.getLogWriter().info("testRegionsViaMbeanAndFunctionsForPartRgn memSizeFromMbean= " + memSizeFromMbean);
String memSizeFromFunctionCall = (String) manager.invoke(new SerializableCallable() {
public Object call() {
@@ -2081,7 +2081,7 @@ public class GemfireDataCommandsDUnitTest extends CliCommandTestBase {
}
});
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testRegionsViaMbeanAndFunctionsForPartRgn memSizeFromFunctionCall= " + memSizeFromFunctionCall);
assertTrue(memSizeFromFunctionCall.equals(memSizeFromMbean));
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GetCommandOnRegionWithCacheLoaderDuringCacheMissDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GetCommandOnRegionWithCacheLoaderDuringCacheMissDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GetCommandOnRegionWithCacheLoaderDuringCacheMissDUnitTest.java
index d4bcb27..7f161fe 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GetCommandOnRegionWithCacheLoaderDuringCacheMissDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/GetCommandOnRegionWithCacheLoaderDuringCacheMissDUnitTest.java
@@ -40,7 +40,7 @@ import com.gemstone.gemfire.management.internal.cli.result.CompositeResultData;
import com.gemstone.gemfire.management.internal.cli.result.ResultData;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.SerializableRunnableIF;
import com.gemstone.gemfire.test.dunit.VM;
@@ -197,7 +197,7 @@ public class GetCommandOnRegionWithCacheLoaderDuringCacheMissDUnitTest extends C
protected void log(final String tag, final String message) {
//System.out.printf("%1$s (%2$s)%n", tag, message);
- LogWriterSupport.getLogWriter().info(String.format("%1$s (%2$s)%n", tag, message));
+ LogWriterUtils.getLogWriter().info(String.format("%1$s (%2$s)%n", tag, message));
}
protected CommandResult runCommand(final String command) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/IndexCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/IndexCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/IndexCommandsDUnitTest.java
index 66b914d..431c6b3 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/IndexCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/IndexCommandsDUnitTest.java
@@ -37,7 +37,7 @@ import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -741,8 +741,8 @@ public class IndexCommandsDUnitTest extends CliCommandTestBase {
}
private void writeToLog(String text, String resultAsString) {
- LogWriterSupport.getLogWriter().info(getTestMethodName() + "\n");
- LogWriterSupport.getLogWriter().info(resultAsString);
+ LogWriterUtils.getLogWriter().info(getTestMethodName() + "\n");
+ LogWriterUtils.getLogWriter().info(resultAsString);
}
private void setupSystem() {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeDiskStoreCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeDiskStoreCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeDiskStoreCommandsDUnitTest.java
index 1d37c49..386b8ed 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeDiskStoreCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeDiskStoreCommandsDUnitTest.java
@@ -29,7 +29,7 @@ import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.management.cli.Result;
import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.SerializableRunnableIF;
import com.gemstone.gemfire.test.dunit.VM;
@@ -91,7 +91,7 @@ public class ListAndDescribeDiskStoreCommandsDUnitTest extends CliCommandTestBas
protected Properties createDistributedSystemProperties(final String gemfireName) {
final Properties distributedSystemProperties = new Properties();
- distributedSystemProperties.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ distributedSystemProperties.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
distributedSystemProperties.setProperty(DistributionConfig.NAME_NAME, gemfireName);
return distributedSystemProperties;
@@ -127,7 +127,7 @@ public class ListAndDescribeDiskStoreCommandsDUnitTest extends CliCommandTestBas
final Result result = executeCommand(CliStrings.LIST_DISK_STORE);
assertNotNull(result);
- LogWriterSupport.getLogWriter().info(toString(result));
+ LogWriterUtils.getLogWriter().info(toString(result));
assertEquals(Result.Status.OK, result.getStatus());
}
@@ -136,7 +136,7 @@ public class ListAndDescribeDiskStoreCommandsDUnitTest extends CliCommandTestBas
CliStrings.DESCRIBE_DISK_STORE + " --member=producerServer --name=producerData");
assertNotNull(result);
- LogWriterSupport.getLogWriter().info(toString(result));
+ LogWriterUtils.getLogWriter().info(toString(result));
assertEquals(Result.Status.OK, result.getStatus());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeRegionDUnitTest.java
index 4956541..2d11491 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListAndDescribeRegionDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.management.internal.cli.util.RegionAttributesNames;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -193,8 +193,8 @@ public class ListAndDescribeRegionDUnitTest extends CliCommandTestBase {
String commandString = csb.toString();
CommandResult commandResult = executeCommand(commandString);
String commandResultAsString = commandResultToString(commandResult);
- LogWriterSupport.getLogWriter().info("Command String : " + commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultAsString);
+ LogWriterUtils.getLogWriter().info("Command String : " + commandString);
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultAsString);
assertEquals(Status.OK, commandResult.getStatus());
assertTrue(commandResultAsString.contains(PR1));
assertTrue(commandResultAsString.contains(LOCALREGIONONMANAGER));
@@ -208,8 +208,8 @@ public class ListAndDescribeRegionDUnitTest extends CliCommandTestBase {
commandString = csb.toString();
commandResult = executeCommand(commandString);
commandResultAsString = commandResultToString(commandResult);
- LogWriterSupport.getLogWriter().info("Command String : " + commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultAsString);
+ LogWriterUtils.getLogWriter().info("Command String : " + commandString);
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultAsString);
assertEquals(Status.OK, commandResult.getStatus());
assertTrue(commandResultAsString.contains(PR1));
assertTrue(commandResultAsString.contains(LOCALREGIONONMANAGER));
@@ -219,8 +219,8 @@ public class ListAndDescribeRegionDUnitTest extends CliCommandTestBase {
commandString = csb.toString();
commandResult = executeCommand(commandString);
commandResultAsString = commandResultToString(commandResult);
- LogWriterSupport.getLogWriter().info("Command String : " + commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultAsString);
+ LogWriterUtils.getLogWriter().info("Command String : " + commandString);
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultAsString);
assertEquals(Status.OK, commandResult.getStatus());
assertTrue(commandResultAsString.contains(PR1));
assertTrue(commandResultAsString.contains(REGION1));
@@ -233,8 +233,8 @@ public class ListAndDescribeRegionDUnitTest extends CliCommandTestBase {
commandString = csb.toString();
commandResult = executeCommand(commandString);
commandResultAsString = commandResultToString(commandResult);
- LogWriterSupport.getLogWriter().info("Command String : " + commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultAsString);
+ LogWriterUtils.getLogWriter().info("Command String : " + commandString);
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultAsString);
assertEquals(Status.OK, commandResult.getStatus());
assertTrue(commandResultAsString.contains(PR1));
assertTrue(commandResultAsString.contains(LOCALREGIONONMANAGER));
@@ -244,8 +244,8 @@ public class ListAndDescribeRegionDUnitTest extends CliCommandTestBase {
commandString = csb.toString();
commandResult = executeCommand(commandString);
commandResultAsString = commandResultToString(commandResult);
- LogWriterSupport.getLogWriter().info("Command String : " + commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultAsString);
+ LogWriterUtils.getLogWriter().info("Command String : " + commandString);
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultAsString);
assertEquals(Status.OK, commandResult.getStatus());
assertTrue(commandResultAsString.contains(PR1));
assertTrue(commandResultAsString.contains(REGION1));
@@ -261,8 +261,8 @@ public class ListAndDescribeRegionDUnitTest extends CliCommandTestBase {
String commandString = csb.toString();
CommandResult commandResult = executeCommand(commandString);
String commandResultAsString = commandResultToString(commandResult);
- LogWriterSupport.getLogWriter().info("Command String : " + commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultAsString);
+ LogWriterUtils.getLogWriter().info("Command String : " + commandString);
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultAsString);
assertEquals(Status.OK, commandResult.getStatus());
assertTrue(commandResultAsString.contains(PR1));
assertTrue(commandResultAsString.contains("Server1"));
@@ -272,8 +272,8 @@ public class ListAndDescribeRegionDUnitTest extends CliCommandTestBase {
commandString = csb.toString();
commandResult = executeCommand(commandString);
commandResultAsString = commandResultToString(commandResult);
- LogWriterSupport.getLogWriter().info("Command String : " + commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultAsString);
+ LogWriterUtils.getLogWriter().info("Command String : " + commandString);
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultAsString);
assertEquals(Status.OK, commandResult.getStatus());
assertTrue(commandResultAsString.contains(LOCALREGIONONMANAGER));
assertTrue(commandResultAsString.contains("Manager"));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java
index 176155f..b7c42dc 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.management.cli.Result;
import com.gemstone.gemfire.management.internal.cli.domain.IndexDetails;
import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.SerializableRunnableIF;
import com.gemstone.gemfire.test.dunit.VM;
@@ -138,7 +138,7 @@ public class ListIndexCommandDUnitTest extends CliCommandTestBase {
protected Properties createDistributedSystemProperties(final String gemfireName) {
final Properties distributedSystemProperties = new Properties();
- distributedSystemProperties.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ distributedSystemProperties.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
distributedSystemProperties.setProperty(DistributionConfig.NAME_NAME, gemfireName);
return distributedSystemProperties;
@@ -176,7 +176,7 @@ public class ListIndexCommandDUnitTest extends CliCommandTestBase {
}
}
} catch (Exception e) {
- LogWriterSupport.getLogWriter().error(
+ LogWriterUtils.getLogWriter().error(
String.format("Error occurred creating Index (%1$s) on Region (%2$s) - (%3$s)", indexName,
region.getFullPath(), e.getMessage()));
}
@@ -278,11 +278,11 @@ public class ListIndexCommandDUnitTest extends CliCommandTestBase {
@SuppressWarnings("unchecked")
protected <T extends Comparable<T>, B extends AbstractBean<T>> B query(final Cache cache, final String queryString) {
try {
- LogWriterSupport.getLogWriter().info(String.format("Running Query (%1$s) in GemFire...", queryString));
+ LogWriterUtils.getLogWriter().info(String.format("Running Query (%1$s) in GemFire...", queryString));
final SelectResults<B> results = (SelectResults<B>) cache.getQueryService().newQuery(queryString).execute();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
String.format("Running Query (%1$s) in GemFire returned (%2$d) result(s).", queryString, results.size()));
return (results.iterator().hasNext() ? results.iterator().next() : null);
@@ -294,12 +294,12 @@ public class ListIndexCommandDUnitTest extends CliCommandTestBase {
protected <T extends Comparable<T>, B extends AbstractBean<T>> B query(final Region<T, B> region,
final String queryPredicate) {
try {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
String.format("Running Query (%1$s) on Region (%2$s)...", queryPredicate, region.getFullPath()));
final SelectResults<B> results = region.query(queryPredicate);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
String.format("Running Query (%1$s) on Region (%2$s) returned (%3$d) result(s).", queryPredicate,
region.getFullPath(), results.size()));
@@ -315,7 +315,7 @@ public class ListIndexCommandDUnitTest extends CliCommandTestBase {
final Result result = executeCommand(CliStrings.LIST_INDEX + " --" + CliStrings.LIST_INDEX__STATS);
assertNotNull(result);
- LogWriterSupport.getLogWriter().info(toString(result));
+ LogWriterUtils.getLogWriter().info(toString(result));
assertEquals(Result.Status.OK, result.getStatus());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java
index 3cc09e2..7d6efc5 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java
@@ -39,8 +39,8 @@ import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.management.internal.cli.remote.CommandProcessor;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -181,7 +181,7 @@ public class MemberCommandsDUnitTest extends CacheTestCase {
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
- props.setProperty(DistributionConfig.LOCATORS_NAME, NetworkSupport.getServerHostName(host) + "[" + locatorPort + "]");
+ props.setProperty(DistributionConfig.LOCATORS_NAME, NetworkUtils.getServerHostName(host) + "[" + locatorPort + "]");
props.setProperty(DistributionConfig.LOG_LEVEL_NAME, "info");
props.setProperty(DistributionConfig.STATISTIC_SAMPLING_ENABLED_NAME, "true");
props.setProperty(DistributionConfig.ENABLE_TIME_STATISTICS_NAME, "true");
@@ -208,7 +208,7 @@ public class MemberCommandsDUnitTest extends CacheTestCase {
setupSystem();
CommandProcessor commandProcessor = new CommandProcessor();
Result result = commandProcessor.createCommandStatement(CliStrings.LIST_MEMBER, EMPTY_ENV).process();
- LogWriterSupport.getLogWriter().info("#SB" + getResultAsString(result));
+ LogWriterUtils.getLogWriter().info("#SB" + getResultAsString(result));
assertEquals(true, result.getStatus().equals(Status.OK));
}
@@ -231,7 +231,7 @@ public class MemberCommandsDUnitTest extends CacheTestCase {
CommandProcessor commandProcessor = new CommandProcessor();
Result result = commandProcessor.createCommandStatement(CliStrings.LIST_MEMBER, EMPTY_ENV).process();
- LogWriterSupport.getLogWriter().info("#SB" + getResultAsString(result));
+ LogWriterUtils.getLogWriter().info("#SB" + getResultAsString(result));
assertEquals(true, result.getStatus().equals(Status.ERROR));
} finally {
locator.stop(); // fix for bug 46562
@@ -250,7 +250,7 @@ public class MemberCommandsDUnitTest extends CacheTestCase {
CommandStringBuilder csb = new CommandStringBuilder(CliStrings.LIST_MEMBER);
csb.addOption(CliStrings.LIST_MEMBER__GROUP, "G1");
Result result = commandProcessor.createCommandStatement(csb.toString(), EMPTY_ENV).process();
- LogWriterSupport.getLogWriter().info("#SB" + getResultAsString(result));
+ LogWriterUtils.getLogWriter().info("#SB" + getResultAsString(result));
assertEquals(true, result.getStatus().equals(Status.OK));
}
@@ -273,7 +273,7 @@ public class MemberCommandsDUnitTest extends CacheTestCase {
Result result = commandProcessor.createCommandStatement("describe member --name=" + member.getId(),
EMPTY_ENV).process();
assertEquals(true, result.getStatus().equals(Status.OK));
- LogWriterSupport.getLogWriter().info("#SB" + getResultAsString(result));
+ LogWriterUtils.getLogWriter().info("#SB" + getResultAsString(result));
//assertEquals(true, result.getStatus().equals(Status.OK));
}
}
[14/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart3DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart3DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart3DUnitTest.java
index f01f6b2..ac5d8b7 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart3DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart3DUnitTest.java
@@ -21,7 +21,7 @@ import com.gemstone.gemfire.internal.cache.ClientServerObserverHolder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
/**
@@ -52,7 +52,7 @@ public class RedundancyLevelPart3DUnitTest extends RedundancyLevelTestBase
{
try {
CacheServerTestUtil.disableShufflingOfEndpoints();
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 3);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 3);
createEntriesK1andK2();
registerK1AndK2();
assertEquals(3, pool.getRedundantNames().size());
@@ -156,7 +156,7 @@ public class RedundancyLevelPart3DUnitTest extends RedundancyLevelTestBase
{
try {
CacheServerTestUtil.disableShufflingOfEndpoints();
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 0);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 0);
createEntriesK1andK2();
registerK1AndK2();
assertEquals(0, pool.getRedundantNames().size());
@@ -208,7 +208,7 @@ public class RedundancyLevelPart3DUnitTest extends RedundancyLevelTestBase
try {
// long maxWaitTime = 60000;
CacheServerTestUtil.disableShufflingOfEndpoints();
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
createEntriesK1andK2();
registerK1AndK2();
assertEquals(1, pool.getRedundantNames().size());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelTestBase.java
index dcf80c0..b110870 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelTestBase.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -124,7 +124,7 @@ public class RedundancyLevelTestBase extends DistributedTestCase
PORT4 = ((Integer)server3.invoke(RedundancyLevelTestBase.class,
"createServerCache")).intValue();
- String hostName = NetworkSupport.getServerHostName(Host.getHost(0));
+ String hostName = NetworkUtils.getServerHostName(Host.getHost(0));
SERVER1 = hostName + PORT1;
SERVER2 = hostName + PORT2;
SERVER3 = hostName + PORT3;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegionCloseDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegionCloseDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegionCloseDUnitTest.java
index ba8b456..b00c85b 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegionCloseDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegionCloseDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -79,7 +79,7 @@ public class RegionCloseDUnitTest extends DistributedTestCase
PORT1 = ((Integer)server1.invoke(RegionCloseDUnitTest.class, "createServerCache" )).intValue();
client1.invoke(RegionCloseDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(host), new Integer(PORT1)});
+ NetworkUtils.getServerHostName(host), new Integer(PORT1)});
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestBeforeRegionCreationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestBeforeRegionCreationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestBeforeRegionCreationDUnitTest.java
index a2c7ac1..f705664 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestBeforeRegionCreationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestBeforeRegionCreationDUnitTest.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -207,11 +207,11 @@ public class RegisterInterestBeforeRegionCreationDUnitTest extends DistributedTe
//client1 connected to server1
client1.invoke(RegisterInterestBeforeRegionCreationDUnitTest.class, "createClient",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
//client2 connected to server2
client2.invoke(RegisterInterestBeforeRegionCreationDUnitTest.class, "createClient",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT2) });
}
public static Integer createServer(Boolean createRegion) throws Exception
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestKeysDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestKeysDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestKeysDUnitTest.java
index 5b726ca..5541a6a 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestKeysDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RegisterInterestKeysDUnitTest.java
@@ -26,8 +26,8 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.cache.client.*;
@@ -89,15 +89,15 @@ public class RegisterInterestKeysDUnitTest extends DistributedTestCase
host.getVM(i).invoke(getClass(), "createImpl", null);
}
- LogWriterSupport.getLogWriter().info("implementation class is " + impl.getClass());
+ LogWriterUtils.getLogWriter().info("implementation class is " + impl.getClass());
PORT1 = ((Integer)server1.invoke(impl.getClass(), "createServerCache" )).intValue();
PORT2 = ((Integer)server2.invoke(impl.getClass(), "createServerCache" )).intValue();
client1.invoke(impl.getClass(), "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
client2.invoke(impl.getClass(), "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ReliableMessagingDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ReliableMessagingDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ReliableMessagingDUnitTest.java
index e11da5a..e0fc7ac 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ReliableMessagingDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ReliableMessagingDUnitTest.java
@@ -44,8 +44,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -117,11 +117,11 @@ public class ReliableMessagingDUnitTest extends DistributedTestCase
createEntries();
setClientServerObserverForBeforeSendingClientAck();
server1.invoke(ReliableMessagingDUnitTest.class, "putOnServer");
- LogWriterSupport.getLogWriter().info("Entering waitForServerUpdate");
+ LogWriterUtils.getLogWriter().info("Entering waitForServerUpdate");
waitForServerUpdate();
- LogWriterSupport.getLogWriter().info("Entering waitForCallback");
+ LogWriterUtils.getLogWriter().info("Entering waitForCallback");
waitForCallback();
- LogWriterSupport.getLogWriter().info("Entering waitForClientAck");
+ LogWriterUtils.getLogWriter().info("Entering waitForClientAck");
waitForClientAck();
server2.invoke(ReliableMessagingDUnitTest.class, "checkTidAndSeq");
}
@@ -151,7 +151,7 @@ public class ReliableMessagingDUnitTest extends DistributedTestCase
(System.currentTimeMillis() - start) < maxWaitTime);
sleep(1000);
}
- LogWriterSupport.getLogWriter().info("seo = " + seo);
+ LogWriterUtils.getLogWriter().info("seo = " + seo);
assertTrue("Creation time " + creationTime + " supposed to be same as seo "
+ seo.getCreationTime(), creationTime == seo.getCreationTime());
}
@@ -180,7 +180,7 @@ public class ReliableMessagingDUnitTest extends DistributedTestCase
.getValue();
assertFalse(seo.getAckSend());
creationTime = seo.getCreationTime();
- LogWriterSupport.getLogWriter().info("seo is " + seo.toString());
+ LogWriterUtils.getLogWriter().info("seo is " + seo.toString());
assertTrue("Creation time not set", creationTime != 0);
Object args[] =
@@ -299,13 +299,13 @@ public class ReliableMessagingDUnitTest extends DistributedTestCase
origObserver = ClientServerObserverHolder.setInstance(new ClientServerObserverAdapter() {
public void beforeSendingClientAck()
{
- LogWriterSupport.getLogWriter().info("beforeSendingClientAck invoked");
+ LogWriterUtils.getLogWriter().info("beforeSendingClientAck invoked");
setCreationTimeTidAndSeq();
server1.invoke(ReliableMessagingDUnitTest.class, "stopServer");
checkServerCount(1,1);
server2.invoke(ReliableMessagingDUnitTest.class, "checkEmptyDispatchedMsgs");
PoolImpl.BEFORE_SENDING_CLIENT_ACK_CALLBACK_FLAG = false;
- LogWriterSupport.getLogWriter().info("end of beforeSendingClientAck");
+ LogWriterUtils.getLogWriter().info("end of beforeSendingClientAck");
}
});
}
@@ -376,7 +376,7 @@ public class ReliableMessagingDUnitTest extends DistributedTestCase
server.setPort(port);
server.setNotifyBySubscription(true);
server.start();
- LogWriterSupport.getLogWriter().info("Server started at PORT = " + port);
+ LogWriterUtils.getLogWriter().info("Server started at PORT = " + port);
return new Integer(server.getPort());
}
@@ -388,7 +388,7 @@ public class ReliableMessagingDUnitTest extends DistributedTestCase
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, "");
cache = test.createCache(props);
- String host = NetworkSupport.getServerHostName(Host.getHost(0));
+ String host = NetworkUtils.getServerHostName(Host.getHost(0));
PoolImpl p = (PoolImpl)PoolManager.createFactory()
.addServer(host, PORT1)
.addServer(host, PORT2)
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UnregisterInterestDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UnregisterInterestDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UnregisterInterestDUnitTest.java
index 2538d70..6078f58 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UnregisterInterestDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UnregisterInterestDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.internal.cache.FilterProfile;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -313,7 +313,7 @@ public class UnregisterInterestDUnitTest extends DistributedTestCase {
DistributedSystem ds = new UnregisterInterestDUnitTest("UnregisterInterestDUnitTest").getSystem();
ds.disconnect();
Properties props = new Properties();
- props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
CacheFactory cf = new CacheFactory(props);
cache = cf.create();
RegionFactory rf = ((GemFireCacheImpl)cache).createRegionFactory(RegionShortcut.REPLICATE);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UpdatePropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UpdatePropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UpdatePropagationDUnitTest.java
index a3d0878..f9c2160 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UpdatePropagationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/UpdatePropagationDUnitTest.java
@@ -48,7 +48,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -128,9 +128,9 @@ public class UpdatePropagationDUnitTest extends DistributedTestCase
PORT2 = ((Integer)server2.invoke(getClass(), "createServerCache" )).intValue();
client1.invoke(getClass(), "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
client2.invoke(getClass(), "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
IgnoredException.addIgnoredException("java.net.SocketException");
IgnoredException.addIgnoredException("Unexpected IOException");
@@ -257,7 +257,7 @@ public class UpdatePropagationDUnitTest extends DistributedTestCase
// Client1 should not receive updated value while client2 should receive
client1.invoke(impl.getClass(),
"acquireConnectionsAndPutonK1andK2",
- new Object[] { NetworkSupport.getServerHostName(client1.getHost())});
+ new Object[] { NetworkUtils.getServerHostName(client1.getHost())});
//pause(5000);
//Check if both the puts ( on key1 & key2 ) have reached the servers
server1.invoke(impl.getClass(), "verifyUpdates");
@@ -383,7 +383,7 @@ public class UpdatePropagationDUnitTest extends DistributedTestCase
// Client1 should not receive updated value while client2 should receive
client1.invoke(impl.getClass(),
"acquireConnectionsAndPutonK1andK2",
- new Object[] { NetworkSupport.getServerHostName(client1.getHost())});
+ new Object[] { NetworkUtils.getServerHostName(client1.getHost())});
Wait.pause(5000);
//Check if both the puts ( on key1 & key2 ) have reached the servers
server1.invoke(impl.getClass(), "verifyUpdates");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/VerifyUpdatesFromNonInterestEndPointDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/VerifyUpdatesFromNonInterestEndPointDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/VerifyUpdatesFromNonInterestEndPointDUnitTest.java
index 9fbdfdb..d402c4a 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/VerifyUpdatesFromNonInterestEndPointDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/VerifyUpdatesFromNonInterestEndPointDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.cache.client.*;
@@ -87,7 +87,7 @@ public class VerifyUpdatesFromNonInterestEndPointDUnitTest extends DistributedTe
PORT2 = ((Integer)vm1.invoke(VerifyUpdatesFromNonInterestEndPointDUnitTest.class, "createServerCache" )).intValue();
vm2.invoke(VerifyUpdatesFromNonInterestEndPointDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1),new Integer(PORT2)});
+ new Object[] { NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1),new Integer(PORT2)});
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/versions/RegionVersionVectorJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/versions/RegionVersionVectorJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/versions/RegionVersionVectorJUnitTest.java
index c992012..df85998 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/versions/RegionVersionVectorJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/versions/RegionVersionVectorJUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.internal.HeapDataOutputStream;
import com.gemstone.gemfire.internal.InternalDataSerializer;
import com.gemstone.gemfire.internal.Version;
import com.gemstone.gemfire.internal.cache.persistence.DiskStoreID;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.junit.categories.UnitTest;
@Category(UnitTest.class)
@@ -56,7 +56,7 @@ public class RegionVersionVectorJUnitTest extends TestCase {
public void testRegionVersionVectors() throws Exception {
// this is just a quick set of unit tests for basic RVV functionality
- final String local = NetworkSupport.getIPLiteral();
+ final String local = NetworkUtils.getIPLiteral();
InternalDistributedMember server1 = new InternalDistributedMember(local, 101);
InternalDistributedMember server2 = new InternalDistributedMember(local, 102);
InternalDistributedMember server3 = new InternalDistributedMember(local, 103);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/AsyncEventQueueTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/AsyncEventQueueTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/AsyncEventQueueTestBase.java
index 2b5f2be..17c4558 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/AsyncEventQueueTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/AsyncEventQueueTestBase.java
@@ -79,7 +79,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -449,7 +449,7 @@ public class AsyncEventQueueTestBase extends DistributedTestCase {
else {
persistentDirectory = new File(diskStoreName);
}
- LogWriterSupport.getLogWriter().info("The ds is : " + persistentDirectory.getName());
+ LogWriterUtils.getLogWriter().info("The ds is : " + persistentDirectory.getName());
persistentDirectory.mkdir();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
File[] dirs1 = new File[] { persistentDirectory };
@@ -1022,12 +1022,12 @@ public class AsyncEventQueueTestBase extends DistributedTestCase {
try {
RebalanceResults simulateResults = null;
if (!heapEviction) {
- LogWriterSupport.getLogWriter().info("Calling rebalance simulate");
+ LogWriterUtils.getLogWriter().info("Calling rebalance simulate");
RebalanceOperation simulateOp = factory.simulate();
simulateResults = simulateOp.getResults();
}
- LogWriterSupport.getLogWriter().info("Starting rebalancing");
+ LogWriterUtils.getLogWriter().info("Starting rebalancing");
RebalanceOperation rebalanceOp = factory.start();
RebalanceResults rebalanceResults = rebalanceOp.getResults();
@@ -1406,7 +1406,7 @@ public class AsyncEventQueueTestBase extends DistributedTestCase {
for (int bucketId : bucketIds) {
List<GatewaySenderEventImpl> eventsForBucket = bucketToEventsMap
.get(bucketId);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Events for bucket: " + bucketId + " is " + eventsForBucket);
assertNotNull(eventsForBucket);
for (int i = 0; i < batchSize; i++) {
@@ -1428,7 +1428,7 @@ public class AsyncEventQueueTestBase extends DistributedTestCase {
final Map eventsMap = ((MyAsyncEventListener)theListener).getEventsMap();
assertNotNull(eventsMap);
- LogWriterSupport.getLogWriter().info("The events map size is " + eventsMap.size());
+ LogWriterUtils.getLogWriter().info("The events map size is " + eventsMap.size());
return eventsMap.size();
}
@@ -1488,7 +1488,7 @@ public class AsyncEventQueueTestBase extends DistributedTestCase {
}
}
if (sender.isPrimary()) {
- LogWriterSupport.getLogWriter().info("Gateway sender is killed by a test");
+ LogWriterUtils.getLogWriter().info("Gateway sender is killed by a test");
cache.getDistributedSystem().disconnect();
return Boolean.TRUE;
}
@@ -1511,7 +1511,7 @@ public class AsyncEventQueueTestBase extends DistributedTestCase {
}
}
if (queue.isPrimary()) {
- LogWriterSupport.getLogWriter().info("AsyncEventQueue is killed by a test");
+ LogWriterUtils.getLogWriter().info("AsyncEventQueue is killed by a test");
cache.getDistributedSystem().disconnect();
return Boolean.TRUE;
}
@@ -1519,10 +1519,10 @@ public class AsyncEventQueueTestBase extends DistributedTestCase {
}
public static void killSender() {
- LogWriterSupport.getLogWriter().info("Gateway sender is going to be killed by a test");
+ LogWriterUtils.getLogWriter().info("Gateway sender is going to be killed by a test");
cache.close();
cache.getDistributedSystem().disconnect();
- LogWriterSupport.getLogWriter().info("Gateway sender is killed by a test");
+ LogWriterUtils.getLogWriter().info("Gateway sender is killed by a test");
}
public static class MyLocatorCallback extends LocatorDiscoveryCallbackAdapter {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/asyncqueue/AsyncEventListenerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/asyncqueue/AsyncEventListenerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/asyncqueue/AsyncEventListenerDUnitTest.java
index c15facb..02ed4ef 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/asyncqueue/AsyncEventListenerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/asyncqueue/AsyncEventListenerDUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.cache.wan.AsyncEventQueueTestBase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
@@ -645,8 +645,8 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
int vm5size = (Integer)vm5.invoke(AsyncEventQueueTestBase.class,
"getAsyncEventListenerMapSize", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("vm4 size is: " + vm4size);
- LogWriterSupport.getLogWriter().info("vm5 size is: " + vm5size);
+ LogWriterUtils.getLogWriter().info("vm4 size is: " + vm4size);
+ LogWriterUtils.getLogWriter().info("vm5 size is: " + vm5size);
// verify that there is no event loss
assertTrue(
"Total number of entries in events map on vm4 and vm5 should be at least 2000",
@@ -1662,14 +1662,14 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
vm4.invoke(AsyncEventQueueTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(AsyncEventQueueTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created the cache");
+ LogWriterUtils.getLogWriter().info("Created the cache");
vm4.invoke(AsyncEventQueueTestBase.class, "createAsyncEventQueueWithListener2",
new Object[] { "ln", true, 100, 5, false, null });
vm5.invoke(AsyncEventQueueTestBase.class, "createAsyncEventQueueWithListener2",
new Object[] { "ln", true, 100, 5, false, null });
- LogWriterSupport.getLogWriter().info("Created the AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Created the AsyncEventQueue");
vm4.invoke(AsyncEventQueueTestBase.class,
"createPRWithRedundantCopyWithAsyncEventQueue", new Object[] {
@@ -1678,7 +1678,7 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
"createPRWithRedundantCopyWithAsyncEventQueue", new Object[] {
getTestMethodName() + "_PR", "ln", isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created PR with AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Created PR with AsyncEventQueue");
vm4
.invoke(AsyncEventQueueTestBase.class, "pauseAsyncEventQueue",
@@ -1689,18 +1689,18 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
Wait.pause(1000);// pause for the batchTimeInterval to make sure the AsyncQueue
// is paused
- LogWriterSupport.getLogWriter().info("Paused the AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Paused the AsyncEventQueue");
vm4.invoke(AsyncEventQueueTestBase.class, "doPuts",
new Object[] { getTestMethodName() + "_PR", 80 });
- LogWriterSupport.getLogWriter().info("Done puts");
+ LogWriterUtils.getLogWriter().info("Done puts");
Set<Integer> primaryBucketsVm5 = (Set<Integer>)vm5.invoke(
AsyncEventQueueTestBase.class, "getAllPrimaryBucketsOnTheNode",
new Object[] { getTestMethodName() + "_PR" });
- LogWriterSupport.getLogWriter().info("Primary buckets on vm5: " + primaryBucketsVm5);
+ LogWriterUtils.getLogWriter().info("Primary buckets on vm5: " + primaryBucketsVm5);
// ---------------------------- Kill vm5 --------------------------
vm5.invoke(AsyncEventQueueTestBase.class, "killSender", new Object[] {});
@@ -1728,14 +1728,14 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
vm4.invoke(AsyncEventQueueTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(AsyncEventQueueTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created the cache");
+ LogWriterUtils.getLogWriter().info("Created the cache");
vm4.invoke(AsyncEventQueueTestBase.class, "createAsyncEventQueueWithListener2",
new Object[] { "ln", true, 100, 5, false, null });
vm5.invoke(AsyncEventQueueTestBase.class, "createAsyncEventQueueWithListener2",
new Object[] { "ln", true, 100, 5, false, null });
- LogWriterSupport.getLogWriter().info("Created the AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Created the AsyncEventQueue");
vm4.invoke(AsyncEventQueueTestBase.class,
"createPRWithRedundantCopyWithAsyncEventQueue", new Object[] {
@@ -1744,7 +1744,7 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
"createPRWithRedundantCopyWithAsyncEventQueue", new Object[] {
getTestMethodName() + "_PR", "ln", isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created PR with AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Created PR with AsyncEventQueue");
vm4
.invoke(AsyncEventQueueTestBase.class, "pauseAsyncEventQueue",
@@ -1755,18 +1755,18 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
Wait.pause(1000);// pause for the batchTimeInterval to make sure the AsyncQueue
// is paused
- LogWriterSupport.getLogWriter().info("Paused the AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Paused the AsyncEventQueue");
vm4.invoke(AsyncEventQueueTestBase.class, "doPuts",
new Object[] { getTestMethodName() + "_PR", 80 });
- LogWriterSupport.getLogWriter().info("Done puts");
+ LogWriterUtils.getLogWriter().info("Done puts");
Set<Integer> primaryBucketsVm5 = (Set<Integer>)vm5.invoke(
AsyncEventQueueTestBase.class, "getAllPrimaryBucketsOnTheNode",
new Object[] { getTestMethodName() + "_PR" });
- LogWriterSupport.getLogWriter().info("Primary buckets on vm5: " + primaryBucketsVm5);
+ LogWriterUtils.getLogWriter().info("Primary buckets on vm5: " + primaryBucketsVm5);
// ---------------------------- Kill vm5 --------------------------
vm5.invoke(AsyncEventQueueTestBase.class, "killSender", new Object[] {});
// ----------------------------------------------------------------
@@ -1812,14 +1812,14 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
vm4.invoke(AsyncEventQueueTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(AsyncEventQueueTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created the cache");
+ LogWriterUtils.getLogWriter().info("Created the cache");
vm4.invoke(AsyncEventQueueTestBase.class, "createAsyncEventQueueWithListener2",
new Object[] { "ln", true, 100, 5, false, null });
vm5.invoke(AsyncEventQueueTestBase.class, "createAsyncEventQueueWithListener2",
new Object[] { "ln", true, 100, 5, false, null });
- LogWriterSupport.getLogWriter().info("Created the AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Created the AsyncEventQueue");
vm4.invoke(AsyncEventQueueTestBase.class,
"createPRWithRedundantCopyWithAsyncEventQueue", new Object[] {
@@ -1828,7 +1828,7 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
"createPRWithRedundantCopyWithAsyncEventQueue", new Object[] {
getTestMethodName() + "_PR", "ln", isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created PR with AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Created PR with AsyncEventQueue");
vm4
.invoke(AsyncEventQueueTestBase.class, "pauseAsyncEventQueue",
@@ -1839,12 +1839,12 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
Wait.pause(1000);// pause for the batchTimeInterval to make sure the AsyncQueue
// is paused
- LogWriterSupport.getLogWriter().info("Paused the AsyncEventQueue");
+ LogWriterUtils.getLogWriter().info("Paused the AsyncEventQueue");
vm4.invoke(AsyncEventQueueTestBase.class, "doPuts",
new Object[] { getTestMethodName() + "_PR", 80 });
- LogWriterSupport.getLogWriter().info("Done puts");
+ LogWriterUtils.getLogWriter().info("Done puts");
// ---------------------------- start vm6 --------------------------
vm6.invoke(AsyncEventQueueTestBase.class, "createCache", new Object[] { lnPort });
@@ -1860,7 +1860,7 @@ public class AsyncEventListenerDUnitTest extends AsyncEventQueueTestBase {
Set<Integer> primaryBucketsVm6 = (Set<Integer>)vm6.invoke(
AsyncEventQueueTestBase.class, "getAllPrimaryBucketsOnTheNode",
new Object[] { getTestMethodName() + "_PR" });
- LogWriterSupport.getLogWriter().info("Primary buckets on vm6: " + primaryBucketsVm6);
+ LogWriterUtils.getLogWriter().info("Primary buckets on vm6: " + primaryBucketsVm6);
vm4.invoke(AsyncEventQueueTestBase.class, "resumeAsyncEventQueue",
new Object[] { "ln" });
vm5.invoke(AsyncEventQueueTestBase.class, "resumeAsyncEventQueue",
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionCacheConfigDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionCacheConfigDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionCacheConfigDUnitTest.java
index d6e130c..653a376 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionCacheConfigDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionCacheConfigDUnitTest.java
@@ -27,7 +27,7 @@ import com.gemstone.gemfire.compression.Compressor;
import com.gemstone.gemfire.compression.SnappyCompressor;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -134,15 +134,15 @@ public class CompressionCacheConfigDUnitTest extends CacheTestCase {
disconnectFromDS();
Properties props = new Properties();
props.setProperty("cache-xml-file",cacheXml);
- LogWriterSupport.getLogWriter().info("<ExpectedException action=add>ClassNotFoundException</ExpectedException>");
+ LogWriterUtils.getLogWriter().info("<ExpectedException action=add>ClassNotFoundException</ExpectedException>");
getSystem(props);
assertNotNull(getCache());
return Boolean.TRUE;
} catch(Exception e) {
- LogWriterSupport.getLogWriter().error("Could not create the cache", e);
+ LogWriterUtils.getLogWriter().error("Could not create the cache", e);
return Boolean.FALSE;
} finally {
- LogWriterSupport.getLogWriter().info("<ExpectedException action=remove>ClassNotFoundException</ExpectedException>");
+ LogWriterUtils.getLogWriter().info("<ExpectedException action=remove>ClassNotFoundException</ExpectedException>");
}
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionRegionConfigDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionRegionConfigDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionRegionConfigDUnitTest.java
index 7442ce0..cd71a2c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionRegionConfigDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/compression/CompressionRegionConfigDUnitTest.java
@@ -27,9 +27,9 @@ import com.gemstone.gemfire.cache.server.CacheServer;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.compression.Compressor;
import com.gemstone.gemfire.compression.SnappyCompressor;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -369,7 +369,7 @@ public class CompressionRegionConfigDUnitTest extends CacheTestCase {
try {
assertNotNull(createServerRegion(name,dataPolicy,compressor));
} catch(Exception e) {
- LogWriterSupport.getLogWriter().error("Could not create the compressed region", e);
+ LogWriterUtils.getLogWriter().error("Could not create the compressed region", e);
return Boolean.FALSE;
}
@@ -392,7 +392,7 @@ public class CompressionRegionConfigDUnitTest extends CacheTestCase {
try {
assertNotNull(createRegion(name,dataPolicy,compressor));
} catch(Exception e) {
- LogWriterSupport.getLogWriter().error("Could not create the compressed region", e);
+ LogWriterUtils.getLogWriter().error("Could not create the compressed region", e);
return Boolean.FALSE;
}
@@ -415,7 +415,7 @@ public class CompressionRegionConfigDUnitTest extends CacheTestCase {
try {
assertNotNull(createRegion(name,dataPolicy,compressor,diskStoreName));
} catch(Exception e) {
- LogWriterSupport.getLogWriter().error("Could not create the compressed region", e);
+ LogWriterUtils.getLogWriter().error("Could not create the compressed region", e);
return Boolean.FALSE;
}
@@ -439,7 +439,7 @@ public class CompressionRegionConfigDUnitTest extends CacheTestCase {
try {
assertNotNull(createClientRegion(name,compressor,shortcut));
} catch(Exception e) {
- LogWriterSupport.getLogWriter().error("Could not create the compressed region", e);
+ LogWriterUtils.getLogWriter().error("Could not create the compressed region", e);
return Boolean.FALSE;
}
@@ -521,7 +521,7 @@ public class CompressionRegionConfigDUnitTest extends CacheTestCase {
}
// Running in hydra
else {
- return DistributedTestSupport.getDUnitLocatorPort();
+ return DistributedTestUtils.getDUnitLocatorPort();
}
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/ExceptionsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/ExceptionsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/ExceptionsDUnitTest.java
index 7349431..3f2a0bc 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/ExceptionsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/ExceptionsDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.internal.OSProcess;
import com.gemstone.gemfire.internal.jta.CacheUtils;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.util.test.TestUtil;
@@ -61,7 +61,7 @@ public class ExceptionsDUnitTest extends DistributedTestCase {
//
// sb.append(lineSep);
}
- LogWriterSupport.getLogWriter().fine("***********\n " + sb);
+ LogWriterUtils.getLogWriter().fine("***********\n " + sb);
return sb.toString();
}
@@ -111,10 +111,10 @@ public class ExceptionsDUnitTest extends DistributedTestCase {
* value=\"83f0069202c571faf1ae6c42b4ad46030e4e31c17409e19a\"/>";
*/
int n1 = str.indexOf(search);
- LogWriterSupport.getLogWriter().fine("Start Index = " + n1);
+ LogWriterUtils.getLogWriter().fine("Start Index = " + n1);
int n2 = str.indexOf(last_search, n1);
StringBuffer sbuff = new StringBuffer(str);
- LogWriterSupport.getLogWriter().fine("END Index = " + n2);
+ LogWriterUtils.getLogWriter().fine("END Index = " + n2);
String modified_str = sbuff.replace(n1, n2, new_str).toString();
return modified_str;
}
@@ -174,7 +174,7 @@ public class ExceptionsDUnitTest extends DistributedTestCase {
if (ds != null) ds.disconnect();
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Error in disconnecting from Distributed System");
+ LogWriterUtils.getLogWriter().fine("Error in disconnecting from Distributed System");
}
}
@@ -235,7 +235,7 @@ public class ExceptionsDUnitTest extends DistributedTestCase {
+ "occur");
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Exception caught in runTest1 due to : " + e);
+ LogWriterUtils.getLogWriter().fine("Exception caught in runTest1 due to : " + e);
fail("failed in runTest1 due to " + e);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/IdleTimeOutDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/IdleTimeOutDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/IdleTimeOutDUnitTest.java
index f356a79..02d2531 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/IdleTimeOutDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/IdleTimeOutDUnitTest.java
@@ -41,8 +41,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.util.test.TestUtil;
@@ -65,7 +65,7 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
//
// sb.append(lineSep);
}
- LogWriterSupport.getLogWriter().info("***********\n " + sb);
+ LogWriterUtils.getLogWriter().info("***********\n " + sb);
return sb.toString();
}
@@ -115,22 +115,22 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
* value=\"83f0069202c571faf1ae6c42b4ad46030e4e31c17409e19a\"/>";
*/
int n1 = str.indexOf(search);
- LogWriterSupport.getLogWriter().info("Start Index = " + n1);
+ LogWriterUtils.getLogWriter().info("Start Index = " + n1);
int n2 = str.indexOf(last_search, n1);
StringBuffer sbuff = new StringBuffer(str);
- LogWriterSupport.getLogWriter().info("END Index = " + n2);
+ LogWriterUtils.getLogWriter().info("END Index = " + n2);
String modified_str = sbuff.replace(n1, n2, new_str).toString();
return modified_str;
}
public static String init(String className) throws Exception {
- LogWriterSupport.getLogWriter().fine("PATH11 ");
+ LogWriterUtils.getLogWriter().fine("PATH11 ");
Properties props = new Properties();
String path = System.getProperty("CACHEXMLFILE");
- LogWriterSupport.getLogWriter().fine("PATH2 " + path);
+ LogWriterUtils.getLogWriter().fine("PATH2 " + path);
int pid = OSProcess.getId();
path = File.createTempFile("dunit-cachejta_", ".xml").getAbsolutePath();
- LogWriterSupport.getLogWriter().fine("PATH " + path);
+ LogWriterUtils.getLogWriter().fine("PATH " + path);
/** * Return file as string and then modify the string accordingly ** */
String file_as_str = readFile(TestUtil.getResourcePath(CacheUtils.class, "cachejta.xml"));
file_as_str = file_as_str.replaceAll("newDB", "newDB_" + pid);
@@ -172,7 +172,7 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
String sql = "create table "
+ tableName
+ " (id integer NOT NULL, name varchar(50), CONSTRAINT "+tableName+"_key PRIMARY KEY(id))";
- LogWriterSupport.getLogWriter().info(sql);
+ LogWriterUtils.getLogWriter().info(sql);
Connection conn = ds.getConnection();
Statement sm = conn.createStatement();
sm.execute(sql);
@@ -181,7 +181,7 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
for (int i = 1; i <= 10; i++) {
sql = "insert into " + tableName + " values (" + i + ",'name" + i + "')";
sm.addBatch(sql);
- LogWriterSupport.getLogWriter().info(sql);
+ LogWriterUtils.getLogWriter().info(sql);
}
sm.executeBatch();
conn.close();
@@ -193,18 +193,18 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
Context ctx = cache.getJNDIContext();
DataSource ds = (DataSource) ctx.lookup("java:/SimpleDataSource");
Connection conn = ds.getConnection();
- LogWriterSupport.getLogWriter().info(" trying to drop table: " + tableName);
+ LogWriterUtils.getLogWriter().info(" trying to drop table: " + tableName);
String sql = "drop table " + tableName;
Statement sm = conn.createStatement();
sm.execute(sql);
conn.close();
}
catch (NamingException ne) {
- LogWriterSupport.getLogWriter().info("destroy table naming exception: " + ne);
+ LogWriterUtils.getLogWriter().info("destroy table naming exception: " + ne);
throw ne;
}
catch (SQLException se) {
- LogWriterSupport.getLogWriter().info("destroy table sql exception: " + se);
+ LogWriterUtils.getLogWriter().info("destroy table sql exception: " + se);
throw se;
}
finally {
@@ -240,7 +240,7 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
ds.disconnect();
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Error in disconnecting from Distributed System");
+ LogWriterUtils.getLogWriter().info("Error in disconnecting from Distributed System");
}
}
@@ -267,7 +267,7 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
vm0.invoke(IdleTimeOutDUnitTest.class, "runTest1");
AsyncInvocation asyncObj = vm0.invokeAsync(IdleTimeOutDUnitTest.class,
"runTest2");
- Threads.join(asyncObj, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj, 30 * 1000);
if(asyncObj.exceptionOccurred()){
Assert.fail("asyncObj failed", asyncObj.getException());
}
@@ -281,27 +281,27 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
ds = (DataSource) ctx.lookup("java:/XAPooledDataSource");
}
catch (NamingException e) {
- LogWriterSupport.getLogWriter().info("Naming Exception caught in lookup: " + e);
+ LogWriterUtils.getLogWriter().info("Naming Exception caught in lookup: " + e);
fail("failed in naming lookup: " + e);
return;
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception caught during naming lookup: " + e);
+ LogWriterUtils.getLogWriter().info("Exception caught during naming lookup: " + e);
fail("failed in naming lookup: " + e);
return;
}
try {
for (int count = 0; count < MAX_CONNECTIONS; count++) {
ds.getConnection();
- LogWriterSupport.getLogWriter().info("runTest1 :acquired connection #" + count);
+ LogWriterUtils.getLogWriter().info("runTest1 :acquired connection #" + count);
}
}
catch (SQLException e) {
- LogWriterSupport.getLogWriter().info("Success SQLException caught in runTest1: " + e);
+ LogWriterUtils.getLogWriter().info("Success SQLException caught in runTest1: " + e);
fail("runTest1 SQL Exception caught: " + e);
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception caught in runTest1: " + e);
+ LogWriterUtils.getLogWriter().info("Exception caught in runTest1: " + e);
fail("Exception caught in runTest1: " + e);
e.printStackTrace();
}
@@ -317,12 +317,12 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
ds = (DataSource) ctx.lookup("java:/XAPooledDataSource");
}
catch (NamingException e) {
- LogWriterSupport.getLogWriter().info("Exception caught during naming lookup: " + e);
+ LogWriterUtils.getLogWriter().info("Exception caught during naming lookup: " + e);
fail("failed in naming lookup: " + e);
return;
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception caught during naming lookup: " + e);
+ LogWriterUtils.getLogWriter().info("Exception caught during naming lookup: " + e);
fail("failed in because of unhandled excpetion: " + e);
return;
}
@@ -330,16 +330,16 @@ public class IdleTimeOutDUnitTest extends DistributedTestCase {
for (int count = 0; count < MAX_CONNECTIONS; count++) {
Connection con = ds.getConnection();
assertNotNull("Connection object is null", con);
- LogWriterSupport.getLogWriter().info("runTest2 :acquired connection #" + count);
+ LogWriterUtils.getLogWriter().info("runTest2 :acquired connection #" + count);
}
}
catch (SQLException sqle) {
- LogWriterSupport.getLogWriter().info("SQLException caught in runTest2: " + sqle);
+ LogWriterUtils.getLogWriter().info("SQLException caught in runTest2: " + sqle);
fail("failed because of SQL exception : " + sqle);
sqle.printStackTrace();
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception caught in runTest2: " + e);
+ LogWriterUtils.getLogWriter().info("Exception caught in runTest2: " + e);
fail("failed because of unhandled exception : " + e);
e.printStackTrace();
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/LoginTimeOutDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/LoginTimeOutDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/LoginTimeOutDUnitTest.java
index 5362107..c5a4e02 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/LoginTimeOutDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/LoginTimeOutDUnitTest.java
@@ -44,9 +44,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
import com.gemstone.gemfire.test.dunit.RMIException;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -253,11 +252,11 @@ public class LoginTimeOutDUnitTest extends DistributedTestCase {
VM vm0 = host.getVM(0);
AsyncInvocation test1 = vm0.invokeAsync(LoginTimeOutDUnitTest.class, "runTest1");
AsyncInvocation test2 = vm0.invokeAsync(LoginTimeOutDUnitTest.class, "runTest2");
- Threads.join(test2, 120 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(test2, 120 * 1000);
if(test2.exceptionOccurred()){
Assert.fail("asyncObj failed", test2.getException());
}
- Threads.join(test1, 30000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(test1, 30000);
}
public static void runTest1() throws Exception {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/MaxPoolSizeDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/MaxPoolSizeDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/MaxPoolSizeDUnitTest.java
index 7bd8902..40a165b 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/MaxPoolSizeDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/MaxPoolSizeDUnitTest.java
@@ -41,8 +41,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.util.test.TestUtil;
@@ -65,7 +65,7 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
//
// sb.append(lineSep);
}
- LogWriterSupport.getLogWriter().fine("***********\n " + sb);
+ LogWriterUtils.getLogWriter().fine("***********\n " + sb);
return sb.toString();
}
@@ -115,20 +115,20 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
* value=\"83f0069202c571faf1ae6c42b4ad46030e4e31c17409e19a\"/>";
*/
int n1 = str.indexOf(search);
- LogWriterSupport.getLogWriter().fine("Start Index = " + n1);
+ LogWriterUtils.getLogWriter().fine("Start Index = " + n1);
int n2 = str.indexOf(last_search, n1);
StringBuffer sbuff = new StringBuffer(str);
- LogWriterSupport.getLogWriter().fine("END Index = " + n2);
+ LogWriterUtils.getLogWriter().fine("END Index = " + n2);
String modified_str = sbuff.replace(n1, n2, new_str).toString();
return modified_str;
}
public static String init(String className) throws Exception {
- LogWriterSupport.getLogWriter().fine("PATH11 ");
+ LogWriterUtils.getLogWriter().fine("PATH11 ");
Properties props = new Properties();
int pid = OSProcess.getId();
String path = File.createTempFile("dunit-cachejta_", ".xml").getAbsolutePath();
- LogWriterSupport.getLogWriter().fine("PATH " + path);
+ LogWriterUtils.getLogWriter().fine("PATH " + path);
/** * Return file as string and then modify the string accordingly ** */
String file_as_str = readFile(TestUtil.getResourcePath(CacheUtils.class, "cachejta.xml"));
file_as_str = file_as_str.replaceAll("newDB", "newDB_" + pid);
@@ -170,7 +170,7 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
String sql = "create table "
+ tableName
+ " (id integer NOT NULL, name varchar(50), CONSTRAINT "+tableName+"_key PRIMARY KEY(id))";
- LogWriterSupport.getLogWriter().fine(sql);
+ LogWriterUtils.getLogWriter().fine(sql);
Connection conn = ds.getConnection();
Statement sm = conn.createStatement();
sm.execute(sql);
@@ -179,7 +179,7 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
for (int i = 1; i <= 10; i++) {
sql = "insert into " + tableName + " values (" + i + ",'name" + i + "')";
sm.addBatch(sql);
- LogWriterSupport.getLogWriter().fine(sql);
+ LogWriterUtils.getLogWriter().fine(sql);
}
sm.executeBatch();
conn.close();
@@ -191,19 +191,19 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
Context ctx = cache.getJNDIContext();
DataSource ds = (DataSource) ctx.lookup("java:/SimpleDataSource");
Connection conn = ds.getConnection();
- LogWriterSupport.getLogWriter().fine(" trying to drop table: " + tableName);
+ LogWriterUtils.getLogWriter().fine(" trying to drop table: " + tableName);
String sql = "drop table " + tableName;
Statement sm = conn.createStatement();
sm.execute(sql);
conn.close();
}
catch (NamingException ne) {
- LogWriterSupport.getLogWriter().fine("destroy table naming exception: " + ne);
+ LogWriterUtils.getLogWriter().fine("destroy table naming exception: " + ne);
throw ne;
}
catch (SQLException se) {
if (!se.getMessage().contains("A lock could not be obtained within the time requested")) {
- LogWriterSupport.getLogWriter().fine("destroy table sql exception: " + se);
+ LogWriterUtils.getLogWriter().fine("destroy table sql exception: " + se);
throw se;
} else {
// disregard - this happens sometimes on unit test runs on slower
@@ -241,7 +241,7 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
ds.disconnect();
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Error in disconnecting from Distributed System");
+ LogWriterUtils.getLogWriter().fine("Error in disconnecting from Distributed System");
}
}
@@ -270,7 +270,7 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
VM vm0 = host.getVM(0);
AsyncInvocation asyncObj = vm0.invokeAsync(MaxPoolSizeDUnitTest.class,
"runTest1");
- Threads.join(asyncObj, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj, 30 * 1000);
if(asyncObj.exceptionOccurred()){
Assert.fail("asyncObj failed", asyncObj.getException());
}
@@ -285,19 +285,19 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
ds = (DataSource) ctx.lookup("java:/XAPooledDataSource");
}
catch (NamingException e) {
- LogWriterSupport.getLogWriter().fine("Naming Exception caught in lookup: " + e);
+ LogWriterUtils.getLogWriter().fine("Naming Exception caught in lookup: " + e);
fail("failed in naming lookup: " + e);
return;
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Exception caught during naming lookup: " + e);
+ LogWriterUtils.getLogWriter().fine("Exception caught during naming lookup: " + e);
fail("failed in naming lookup: " + e);
return;
}
try {
for (count = 0; count < MAX_CONNECTIONS; count++) {
ds.getConnection();
- LogWriterSupport.getLogWriter().fine("Thread 1 acquired connection #" + count);
+ LogWriterUtils.getLogWriter().fine("Thread 1 acquired connection #" + count);
}
fail("expected max connect exception");
}
@@ -306,7 +306,7 @@ public class MaxPoolSizeDUnitTest extends DistributedTestCase {
Assert.fail("runTest1 SQL Exception", e);
}
else {
- LogWriterSupport.getLogWriter().fine("Success SQLException caught at connection #"
+ LogWriterUtils.getLogWriter().fine("Success SQLException caught at connection #"
+ count);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TransactionTimeOutDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TransactionTimeOutDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TransactionTimeOutDUnitTest.java
index 86482a9..aeb36ea 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TransactionTimeOutDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TransactionTimeOutDUnitTest.java
@@ -45,8 +45,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.util.test.TestUtil;
@@ -138,8 +138,8 @@ public class TransactionTimeOutDUnitTest extends DistributedTestCase {
AsyncInvocation async1 = vm0.invokeAsync(TransactionTimeOutDUnitTest.class, "runTest1");
AsyncInvocation async2 =vm0.invokeAsync(TransactionTimeOutDUnitTest.class, "runTest2");
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
+ ThreadUtils.join(async2, 30 * 1000);
if(async1.exceptionOccurred()){
Assert.fail("async1 failed", async1.getException());
}
@@ -216,7 +216,7 @@ public class TransactionTimeOutDUnitTest extends DistributedTestCase {
return;
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Exception caught " + e);
+ LogWriterUtils.getLogWriter().fine("Exception caught " + e);
fail("failed in naming lookup: " + e);
return;
}
@@ -242,7 +242,7 @@ public class TransactionTimeOutDUnitTest extends DistributedTestCase {
return;
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Exception caught " + e);
+ LogWriterUtils.getLogWriter().fine("Exception caught " + e);
fail("failed in naming lookup: " + e);
return;
}
@@ -495,7 +495,7 @@ public class TransactionTimeOutDUnitTest extends DistributedTestCase {
//
// sb.append(lineSep);
}
- LogWriterSupport.getLogWriter().fine("***********\n " + sb);
+ LogWriterUtils.getLogWriter().fine("***********\n " + sb);
return sb.toString();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnManagerMultiThreadDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnManagerMultiThreadDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnManagerMultiThreadDUnitTest.java
index 5cfa42f..ded9cd7 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnManagerMultiThreadDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnManagerMultiThreadDUnitTest.java
@@ -46,8 +46,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.util.test.TestUtil;
@@ -187,7 +187,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
String jtest = System.getProperty("JTESTS");
int pid = OSProcess.getId();
String path = File.createTempFile("dunit-cachejta_", ".xml").getAbsolutePath();
- LogWriterSupport.getLogWriter().fine("PATH " + path);
+ LogWriterUtils.getLogWriter().fine("PATH " + path);
/** * Return file as string and then modify the string accordingly ** */
String file_as_str = readFile(TestUtil.getResourcePath(CacheUtils.class, "cachejta.xml"));
file_as_str = file_as_str.replaceAll("newDB", "newDB_" + pid);
@@ -213,7 +213,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("", e);
+ LogWriterUtils.getLogWriter().info("", e);
throw new Exception("" + e);
}
return tableName;
@@ -228,7 +228,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
String sql = "create table "
+ tableName
+ " (id integer NOT NULL, name varchar(50), CONSTRAINT "+tableName+"_key PRIMARY KEY(id))";
- LogWriterSupport.getLogWriter().fine(sql);
+ LogWriterUtils.getLogWriter().fine(sql);
Connection conn = ds.getConnection();
Statement sm = conn.createStatement();
sm.execute(sql);
@@ -237,7 +237,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
for (int i = 1; i <= 10; i++) {
sql = "insert into " + tableName + " values (" + i + ",'name" + i + "')";
sm.addBatch(sql);
- LogWriterSupport.getLogWriter().fine(sql);
+ LogWriterUtils.getLogWriter().fine(sql);
}
sm.executeBatch();
conn.close();
@@ -255,28 +255,28 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
}
try {
String tableName = tblName;
- LogWriterSupport.getLogWriter().fine("Destroying table: " + tableName);
+ LogWriterUtils.getLogWriter().fine("Destroying table: " + tableName);
cache = TxnManagerMultiThreadDUnitTest.getCache();
Context ctx = cache.getJNDIContext();
DataSource ds = (DataSource) ctx.lookup("java:/SimpleDataSource");
Connection conn = ds.getConnection();
- LogWriterSupport.getLogWriter().fine(" trying to drop table: " + tableName);
+ LogWriterUtils.getLogWriter().fine(" trying to drop table: " + tableName);
String sql = "drop table " + tableName;
Statement sm = conn.createStatement();
sm.execute(sql);
conn.close();
- LogWriterSupport.getLogWriter().fine("destroyTable is Successful!");
+ LogWriterUtils.getLogWriter().fine("destroyTable is Successful!");
}
catch (NamingException ne) {
- LogWriterSupport.getLogWriter().fine("destroy table naming exception: " + ne);
+ LogWriterUtils.getLogWriter().fine("destroy table naming exception: " + ne);
throw ne;
}
catch (SQLException se) {
- LogWriterSupport.getLogWriter().fine("destroy table sql exception: " + se);
+ LogWriterUtils.getLogWriter().fine("destroy table sql exception: " + se);
throw se;
}
finally {
- LogWriterSupport.getLogWriter().fine("Closing cache...");
+ LogWriterUtils.getLogWriter().fine("Closing cache...");
closeCache();
}
}//end of destroyTable
@@ -292,7 +292,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().warning("exception while creating cache", e);
+ LogWriterUtils.getLogWriter().warning("exception while creating cache", e);
}
}//end of startCache
@@ -300,18 +300,18 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
try {
if (!cache.isClosed()) {
cache.close();
- LogWriterSupport.getLogWriter().fine("Cache closed");
+ LogWriterUtils.getLogWriter().fine("Cache closed");
}
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().warning("exception while closing cache", e);
+ LogWriterUtils.getLogWriter().warning("exception while closing cache", e);
}
try {
CacheUtils.ds.disconnect();
- LogWriterSupport.getLogWriter().fine("Disconnected from Distribuited System");
+ LogWriterUtils.getLogWriter().fine("Disconnected from Distribuited System");
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Error in disconnecting from Distributed System");
+ LogWriterUtils.getLogWriter().fine("Error in disconnecting from Distributed System");
}
}//end of closeCache
@@ -351,7 +351,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
/*int rowsDeleted = */jtaObj.deleteRows(tblName_delRows);
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().warning("Error: while deleting rows from database using JTAUtils", e);
+ LogWriterUtils.getLogWriter().warning("Error: while deleting rows from database using JTAUtils", e);
}
}//end of delRows
@@ -408,11 +408,11 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
//get how many rows actually got committed
try {
int rows = jtaObj.getRows(tblName);
- LogWriterSupport.getLogWriter().fine("Number of rows committed current test method are: "
+ LogWriterUtils.getLogWriter().fine("Number of rows committed current test method are: "
+ rows);
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().warning("Error: while getting rows from database using JTAUtils", e);
+ LogWriterUtils.getLogWriter().warning("Error: while getting rows from database using JTAUtils", e);
}
}//end of getNumberOfRows
@@ -425,7 +425,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
VM vm0 = Host.getHost(0).getVM(0);
AsyncInvocation asyncObj1 = vm0.invokeAsync(
TxnManagerMultiThreadDUnitTest.class, "callCommitThreads");
- Threads.join(asyncObj1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj1, 30 * 1000);
if(asyncObj1.exceptionOccurred()){
Assert.fail("asyncObj1 failed", asyncObj1.getException());
}
@@ -437,15 +437,15 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
*
*/
public static void callCommitThreads() {
- LogWriterSupport.getLogWriter().fine("This is callCommitThreads method");
+ LogWriterUtils.getLogWriter().fine("This is callCommitThreads method");
try {
- new CommitThread("ct1", LogWriterSupport.getLogWriter());
- new CommitThread("ct2", LogWriterSupport.getLogWriter());
- new CommitThread("ct3", LogWriterSupport.getLogWriter());
- new CommitThread("ct4", LogWriterSupport.getLogWriter());
+ new CommitThread("ct1", LogWriterUtils.getLogWriter());
+ new CommitThread("ct2", LogWriterUtils.getLogWriter());
+ new CommitThread("ct3", LogWriterUtils.getLogWriter());
+ new CommitThread("ct4", LogWriterUtils.getLogWriter());
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().warning("Failed in Commit Threads", e);
+ LogWriterUtils.getLogWriter().warning("Failed in Commit Threads", e);
fail("Failed in Commit Threads" + e);
}
}//end of callCommitTheads
@@ -458,7 +458,7 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
VM vm0 = Host.getHost(0).getVM(0);
AsyncInvocation asyncObj1 = vm0.invokeAsync(
TxnManagerMultiThreadDUnitTest.class, "callCommitandRollbackThreads");
- Threads.join(asyncObj1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj1, 30 * 1000);
if(asyncObj1.exceptionOccurred()){
Assert.fail("asyncObj1 failed", asyncObj1.getException());
}
@@ -466,16 +466,16 @@ public class TxnManagerMultiThreadDUnitTest extends DistributedTestCase {
}//end of test3Commit2Rollback
public static void callCommitandRollbackThreads() {
- LogWriterSupport.getLogWriter().fine("This is callCommitandRollbackThreads method");
+ LogWriterUtils.getLogWriter().fine("This is callCommitandRollbackThreads method");
try {
- new CommitThread("ct1", LogWriterSupport.getLogWriter());
- new CommitThread("ct2", LogWriterSupport.getLogWriter());
- new CommitThread("ct3", LogWriterSupport.getLogWriter());
- new RollbackThread("rt1", LogWriterSupport.getLogWriter());
- new RollbackThread("rt2", LogWriterSupport.getLogWriter());
+ new CommitThread("ct1", LogWriterUtils.getLogWriter());
+ new CommitThread("ct2", LogWriterUtils.getLogWriter());
+ new CommitThread("ct3", LogWriterUtils.getLogWriter());
+ new RollbackThread("rt1", LogWriterUtils.getLogWriter());
+ new RollbackThread("rt2", LogWriterUtils.getLogWriter());
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Failed in Commit and Rollback threads", e);
+ LogWriterUtils.getLogWriter().info("Failed in Commit and Rollback threads", e);
fail("Failed in Commit and Rollback Threads" + e);
}
}//end of callCommitandRollbackThreads
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnTimeOutDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnTimeOutDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnTimeOutDUnitTest.java
index 879f3e4..1ea6b54 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnTimeOutDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/jta/dunit/TxnTimeOutDUnitTest.java
@@ -39,8 +39,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.util.test.TestUtil;
@@ -72,7 +72,7 @@ public class TxnTimeOutDUnitTest extends DistributedTestCase {
wr.close();
props.setProperty("cache-xml-file", path);
// props.setProperty("mcast-port", "10321");
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
try {
// ds = DistributedSystem.connect(props);
ds = (new TxnTimeOutDUnitTest("temp")).getSystem(props);
@@ -157,27 +157,27 @@ public class TxnTimeOutDUnitTest extends DistributedTestCase {
AsyncInvocation asyncObj5 = vm0.invokeAsync(TxnTimeOutDUnitTest.class,
"runTest3",o4);
- Threads.join(asyncObj1, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj1, 5 * 60 * 1000);
if(asyncObj1.exceptionOccurred()){
Assert.fail("asyncObj1 failed", asyncObj1.getException());
}
- Threads.join(asyncObj2, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj2, 5 * 60 * 1000);
if(asyncObj2.exceptionOccurred()){
Assert.fail("asyncObj2 failed", asyncObj2.getException());
}
- Threads.join(asyncObj3, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj3, 5 * 60 * 1000);
if(asyncObj3.exceptionOccurred()){
Assert.fail("asyncObj3 failed", asyncObj3.getException());
}
- Threads.join(asyncObj4, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj4, 5 * 60 * 1000);
if(asyncObj4.exceptionOccurred()){
Assert.fail("asyncObj4 failed", asyncObj4.getException());
}
- Threads.join(asyncObj5, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj5, 5 * 60 * 1000);
if(asyncObj5.exceptionOccurred()){
Assert.fail("asyncObj5 failed", asyncObj5.getException());
}
@@ -196,12 +196,12 @@ public class TxnTimeOutDUnitTest extends DistributedTestCase {
"runTest2");
AsyncInvocation asyncObj2 = vm0.invokeAsync(TxnTimeOutDUnitTest.class, "runTest1");
- Threads.join(asyncObj1, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj1, 5 * 60 * 1000);
if(asyncObj1.exceptionOccurred()){
Assert.fail("asyncObj1 failed", asyncObj1.getException());
}
- Threads.join(asyncObj2, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncObj2, 5 * 60 * 1000);
if(asyncObj2.exceptionOccurred()){
Assert.fail("asyncObj2 failed", asyncObj2.getException());
}
@@ -230,7 +230,7 @@ public class TxnTimeOutDUnitTest extends DistributedTestCase {
fail("Exception did not occur although was supposed to occur");
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Exception caught " + e);
+ LogWriterUtils.getLogWriter().fine("Exception caught " + e);
fail("failed in naming lookup: " + e);
}
finally {
@@ -259,7 +259,7 @@ public class TxnTimeOutDUnitTest extends DistributedTestCase {
fail("Exception did not occur although was supposed to occur");
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().fine("Exception caught " + e);
+ LogWriterUtils.getLogWriter().fine("Exception caught " + e);
fail("failed in naming lookup: " + e);
}
}
@@ -303,7 +303,7 @@ public class TxnTimeOutDUnitTest extends DistributedTestCase {
//
// sb.append(lineSep);
}
- LogWriterSupport.getLogWriter().fine("***********\n " + sb);
+ LogWriterUtils.getLogWriter().fine("***********\n " + sb);
return sb.toString();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/logging/MergeLogFilesJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/logging/MergeLogFilesJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/logging/MergeLogFilesJUnitTest.java
index d1ec86e..32d38c8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/logging/MergeLogFilesJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/logging/MergeLogFilesJUnitTest.java
@@ -41,7 +41,7 @@ import org.junit.experimental.categories.Category;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.SystemFailure;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -79,7 +79,7 @@ public class MergeLogFilesJUnitTest {
for (Iterator iter = workers.iterator(); iter.hasNext(); ) {
Worker worker = (Worker) iter.next();
- Threads.join(worker, 120 * 1000, null);
+ ThreadUtils.join(worker, 120 * 1000);
}
if (group.exceptionOccurred()) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/CacheManagementDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/CacheManagementDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/CacheManagementDUnitTest.java
index 23ee48d..402ea94 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/CacheManagementDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/CacheManagementDUnitTest.java
@@ -46,7 +46,7 @@ import com.gemstone.gemfire.management.internal.ManagementConstants;
import com.gemstone.gemfire.management.internal.SystemManagementService;
import com.gemstone.gemfire.management.internal.NotificationHub.NotificationHubListener;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -144,7 +144,7 @@ public class CacheManagementDUnitTest extends ManagementTestBase {
String log = (String) vm.invoke(CacheManagementDUnitTest.class,
"fetchLog");
assertNotNull(log);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Log Of Member is " + log.toString()
+ "</ExpectedString> ");
@@ -660,7 +660,7 @@ public class CacheManagementDUnitTest extends ManagementTestBase {
.getMemberMXBean();
JVMMetrics metrics = bean.showJVMMetrics();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> JVMMetrics is " + metrics.toString()
+ "</ExpectedString> ");
@@ -671,7 +671,7 @@ public class CacheManagementDUnitTest extends ManagementTestBase {
.getMemberMXBean();
OSMetrics metrics = bean.showOSMetrics();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> OSMetrics is " + metrics.toString()
+ "</ExpectedString> ");
@@ -712,14 +712,14 @@ public class CacheManagementDUnitTest extends ManagementTestBase {
MemberMXBean bean = MBeanUtil.getMemberMbeanProxy(member);
JVMMetrics metrics = bean.showJVMMetrics();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> JVMMetrics is " + metrics.toString()
+ "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> OSMetrics is " + metrics.toString()
+ "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Boolean Data Check " +bean.isManager()
+ "</ExpectedString> ");
[31/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingPoolDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingPoolDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingPoolDUnitTest.java
index dba995e..d1e42d3 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingPoolDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingPoolDUnitTest.java
@@ -50,11 +50,11 @@ import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientNotifier;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -127,7 +127,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
PoolFactory cpf = PoolManager.createFactory();
cpf.setSubscriptionEnabled(subscriptionEnabled);
for (int i=0; i < servers.length; i++){
- LogWriterSupport.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
+ LogWriterUtils.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
cpf.addServer(servers[i], ports[i]);
}
@@ -170,7 +170,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -196,7 +196,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + rootRegionName + "/" + name;
// Create client pool.
@@ -307,7 +307,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -332,7 +332,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + rootRegionName + "/" + name;
@@ -439,7 +439,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -572,7 +572,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
@@ -603,7 +603,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
} catch (Exception e) {
Assert.fail("Failed executing " + queryString, e);
}
- LogWriterSupport.getLogWriter().fine("size: " + results.size());
+ LogWriterUtils.getLogWriter().fine("size: " + results.size());
//assertEquals(numberOfEntries, results.size());
assertTrue(!results.getCollectionType().allowsDuplicates() && results.getCollectionType().getElementType().isStructType());
}
@@ -635,7 +635,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -661,7 +661,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + rootRegionName + "/" + name;
@@ -842,7 +842,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + this.rootRegionName + "/" + this.regionName;
@@ -867,7 +867,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query :" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query :" + queryString[i]);
Query query = qService.newQuery(queryString[i]);
results = (SelectResults)query.execute(params[i]);
} catch (Exception e) {
@@ -900,7 +900,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
for (int x=0; x < useMaintainedCompiledQueries; x++){
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query :" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query :" + queryString[i]);
Query query = qService.newQuery(queryString[i]);
results = (SelectResults)query.execute(params[i]);
} catch (Exception e) {
@@ -1001,7 +1001,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
final int port0 = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
final int port1 = vm1.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + this.rootRegionName + "/" + this.regionName;
@@ -1026,7 +1026,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
for (int j=0; j < queryString.length; j++){
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query :" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query :" + queryString[i]);
Query query = qService.newQuery(queryString[i]);
results = (SelectResults)query.execute(params[i]);
} catch (Exception e) {
@@ -1152,7 +1152,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + this.rootRegionName + "/" + this.regionName;
@@ -1243,7 +1243,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + this.rootRegionName + "/" + this.regionName;
@@ -1363,7 +1363,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + this.rootRegionName + "/" + this.regionName;
@@ -1509,7 +1509,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
// Create client region
final int port0 = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + this.rootRegionName + "/" + this.regionName;
@@ -1535,7 +1535,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
for (int j=0; j < 5; j++){
for (int i=0; i < 2; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query :" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query :" + queryString[i]);
Query query = qService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute(params[i]);
Query query2 = qService.newQuery(querys[i]);
@@ -1544,9 +1544,9 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
} catch (Exception e) {
Assert.fail("Failed executing " + queryString[i], e);
}
- LogWriterSupport.getLogWriter().info("### Comparing results for Query :" + ((i+1) * (j+1)) + " : " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Comparing results for Query :" + ((i+1) * (j+1)) + " : " + queryString[i]);
compareQueryResultsWithoutAndWithIndexes(rs, 1);
- LogWriterSupport.getLogWriter().info("### Done Comparing results for Query :" + ((i+1) * (j+1)) + " : " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Done Comparing results for Query :" + ((i+1) * (j+1)) + " : " + queryString[i]);
}
}
}
@@ -1561,9 +1561,9 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
public void run2() throws CacheException {
long compiledQueryUsedCount = -1;
while (true) {
- LogWriterSupport.getLogWriter().info("### CompiledQueryUsedCount :" + compiledQueryUsedCount);
+ LogWriterUtils.getLogWriter().info("### CompiledQueryUsedCount :" + compiledQueryUsedCount);
if (compiledQueryUsedCount == CacheClientNotifier.getInstance().getStats().getCompiledQueryUsedCount()) {
- LogWriterSupport.getLogWriter().info("### previous and current CompiledQueryUsedCounts are same :" + compiledQueryUsedCount);
+ LogWriterUtils.getLogWriter().info("### previous and current CompiledQueryUsedCounts are same :" + compiledQueryUsedCount);
break;
} else {
compiledQueryUsedCount = CacheClientNotifier.getInstance().getStats().getCompiledQueryUsedCount();
@@ -1595,7 +1595,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
for (int j=0; j < queryString.length; j++){
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query :" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query :" + queryString[i]);
Query query = qService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute(params[i]);
Query query2 = qService.newQuery(querys[i]);
@@ -1619,9 +1619,9 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
public void run2() throws CacheException {
long compiledQueryUsedCount = -1;
while (true) {
- LogWriterSupport.getLogWriter().info("### previous CompiledQueryUsedCount :" + compiledQueryUsedCount);
+ LogWriterUtils.getLogWriter().info("### previous CompiledQueryUsedCount :" + compiledQueryUsedCount);
if (compiledQueryUsedCount == CacheClientNotifier.getInstance().getStats().getCompiledQueryUsedCount()) {
- LogWriterSupport.getLogWriter().info("### previous and current CompiledQueryUsedCounts are same :" + compiledQueryUsedCount);
+ LogWriterUtils.getLogWriter().info("### previous and current CompiledQueryUsedCounts are same :" + compiledQueryUsedCount);
break;
} else {
compiledQueryUsedCount = CacheClientNotifier.getInstance().getStats().getCompiledQueryUsedCount();
@@ -1665,7 +1665,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1696,7 +1696,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName1 = "/" + rootRegionName + "/" + name+"1";
final String regionName2 = "/" + rootRegionName + "/" + name+"2";
@@ -1768,7 +1768,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1793,7 +1793,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + rootRegionName + "/" + name;
@@ -1906,7 +1906,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
Wait.pause(1000);
@@ -1941,7 +1941,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName1 = "/" + rootRegionName + "/" + name;
final String regionName2 = "/" + rootRegionName + "/" + name + "_2";
@@ -2002,7 +2002,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -2036,7 +2036,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName = "/" + rootRegionName + "/" + name;
@@ -2160,7 +2160,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
Wait.pause(1000);
@@ -2187,7 +2187,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(QueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
final String regionName1 = "/" + rootRegionName + "/" + name;
// final String regionName2 = "/" + rootRegionName + "/" + name + "_2";
@@ -2305,16 +2305,16 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
type1 = ((SelectResults)r[j][0]).getCollectionType().getElementType();
type2 = ((SelectResults)r[j][1]).getCollectionType().getElementType();
if ((type1.getClass().getName()).equals(type2.getClass().getName())) {
- LogWriterSupport.getLogWriter().info("Both SelectResults are of the same Type i.e.--> "
+ LogWriterUtils.getLogWriter().info("Both SelectResults are of the same Type i.e.--> "
+ ((SelectResults)r[j][0]).getCollectionType().getElementType());
}
else {
- LogWriterSupport.getLogWriter().info("Classes are : " + type1.getClass().getName() + " "
+ LogWriterUtils.getLogWriter().info("Classes are : " + type1.getClass().getName() + " "
+ type2.getClass().getName());
fail("FAILED:Select result Type is different in both the cases");
}
if (((SelectResults)r[j][0]).size() == ((SelectResults)r[j][1]).size()) {
- LogWriterSupport.getLogWriter().info("Both SelectResults are of Same Size i.e. Size= "
+ LogWriterUtils.getLogWriter().info("Both SelectResults are of Same Size i.e. Size= "
+ ((SelectResults)r[j][1]).size());
}
else {
@@ -2325,7 +2325,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
set2 = (((SelectResults)r[j][1]).asSet());
set1 = (((SelectResults)r[j][0]).asSet());
- LogWriterSupport.getLogWriter().info(" SIZE1 = " + set1.size() + " SIZE2 = " + set2.size());
+ LogWriterUtils.getLogWriter().info(" SIZE1 = " + set1.size() + " SIZE2 = " + set2.size());
// boolean pass = true;
itert1 = set1.iterator();
@@ -2335,10 +2335,10 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
boolean exactMatch = false;
while (itert2.hasNext()) {
- LogWriterSupport.getLogWriter().info("### Comparing results..");
+ LogWriterUtils.getLogWriter().info("### Comparing results..");
Object p2 = itert2.next();
if (p1 instanceof Struct) {
- LogWriterSupport.getLogWriter().info("ITS a Set");
+ LogWriterUtils.getLogWriter().info("ITS a Set");
Object[] values1 = ((Struct)p1).getFieldValues();
Object[] values2 = ((Struct)p2).getFieldValues();
assertEquals(values1.length, values2.length);
@@ -2351,32 +2351,32 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
exactMatch = elementEqual;
}
else {
- LogWriterSupport.getLogWriter().info("Not a Set p2:" + p2 + " p1: " + p1);
+ LogWriterUtils.getLogWriter().info("Not a Set p2:" + p2 + " p1: " + p1);
if (p2 instanceof TestObject) {
- LogWriterSupport.getLogWriter().info("An instance of TestObject");
+ LogWriterUtils.getLogWriter().info("An instance of TestObject");
exactMatch = p2.equals(p1);
} else {
- LogWriterSupport.getLogWriter().info("Not an instance of TestObject" + p2.getClass().getCanonicalName());
+ LogWriterUtils.getLogWriter().info("Not an instance of TestObject" + p2.getClass().getCanonicalName());
exactMatch = p2.equals(p1);
}
}
if (exactMatch) {
- LogWriterSupport.getLogWriter().info("Exact MATCH");
+ LogWriterUtils.getLogWriter().info("Exact MATCH");
break;
}
}
if (!exactMatch) {
- LogWriterSupport.getLogWriter().info("NOT A MATCH");
+ LogWriterUtils.getLogWriter().info("NOT A MATCH");
fail("Atleast one element in the pair of SelectResults supposedly identical, is not equal ");
}
}
- LogWriterSupport.getLogWriter().info("### Done Comparing results..");
+ LogWriterUtils.getLogWriter().info("### Done Comparing results..");
}
}
protected void configAndStartBridgeServer() {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -2403,7 +2403,7 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query :" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query :" + queryString[i]);
Query query = qService.newQuery(queryString[i]);
results = (SelectResults)query.execute(params[i]);
} catch (Exception e) {
@@ -2442,11 +2442,11 @@ public class QueryUsingPoolDUnitTest extends CacheTestCase {
SerializableRunnable closeCache =
new CacheSerializableRunnable("Close Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close Client. ###");
+ LogWriterUtils.getLogWriter().info("### Close Client. ###");
try {
closeCache();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("### Failed to get close client. ###");
+ LogWriterUtils.getLogWriter().info("### Failed to get close client. ###");
}
}
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/RemoteQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/RemoteQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/RemoteQueryDUnitTest.java
index 4be7218..fc79893 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/RemoteQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/RemoteQueryDUnitTest.java
@@ -43,10 +43,10 @@ import com.gemstone.gemfire.cache30.ClientServerTestCase;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -98,7 +98,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -124,7 +124,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
@@ -241,7 +241,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -267,7 +267,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
@@ -369,7 +369,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -395,7 +395,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
@@ -495,7 +495,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -628,7 +628,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
@@ -659,7 +659,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
} catch (Exception e) {
Assert.fail("Failed executing " + queryString, e);
}
- LogWriterSupport.getLogWriter().fine("size: " + results.size());
+ LogWriterUtils.getLogWriter().fine("size: " + results.size());
//assertEquals(numberOfEntries, results.size());
assertTrue(!results.getCollectionType().allowsDuplicates() && results.getCollectionType().getElementType().isStructType());
}
@@ -689,7 +689,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -715,7 +715,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
@@ -854,7 +854,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -885,7 +885,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
// Create client region
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
vm1.invoke(new CacheSerializableRunnable("Create region") {
public void run2() throws CacheException {
Properties config = new Properties();
@@ -955,7 +955,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -979,7 +979,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
// Create client region in VM1
vm1.invoke(new CacheSerializableRunnable("Create region") {
@@ -1114,7 +1114,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1138,7 +1138,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
// Create client region in VM1
vm1.invoke(new CacheSerializableRunnable("Create region") {
@@ -1218,7 +1218,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
vm0.invoke(new CacheSerializableRunnable("Create Bridge Server") {
public void run2() throws CacheException {
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
system = (InternalDistributedSystem) DistributedSystem.connect(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1249,7 +1249,7 @@ public class RemoteQueryDUnitTest extends CacheTestCase {
});
final int port = vm0.invokeInt(RemoteQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
// Create client region in VM1
vm1.invoke(new CacheSerializableRunnable("Create region") {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/ResourceManagerWithQueryMonitorDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/ResourceManagerWithQueryMonitorDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/ResourceManagerWithQueryMonitorDUnitTest.java
index f889d21..85ae9aa 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/ResourceManagerWithQueryMonitorDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/ResourceManagerWithQueryMonitorDUnitTest.java
@@ -66,14 +66,13 @@ import com.gemstone.gemfire.internal.cache.control.ResourceListener;
import com.gemstone.gemfire.internal.cache.control.TestMemoryThresholdListener;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
public class ResourceManagerWithQueryMonitorDUnitTest extends ClientServerTestCase {
@@ -727,7 +726,7 @@ public class ResourceManagerWithQueryMonitorDUnitTest extends ClientServerTestCa
//unless otherwise configured
releaseHook(server);
- Threads.join(queryExecution, 60000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(queryExecution, 60000);
//Make sure no exceptions were thrown during query testing
try {
assertEquals(0, queryExecution.getResult());
@@ -1024,7 +1023,7 @@ public class ResourceManagerWithQueryMonitorDUnitTest extends ClientServerTestCa
getSystem(props);
final ClientCacheFactory ccf = new ClientCacheFactory(props);
- ccf.addPoolServer(NetworkSupport.getServerHostName(server.getHost()), port);
+ ccf.addPoolServer(NetworkUtils.getServerHostName(server.getHost()), port);
ClientCache cache = (ClientCache)getClientCache(ccf);
}
});
@@ -1039,7 +1038,7 @@ public class ResourceManagerWithQueryMonitorDUnitTest extends ClientServerTestCa
getSystem(props);
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(server.getHost()), port);
+ pf.addServer(NetworkUtils.getServerHostName(server.getHost()), port);
pf.create("pool1");
AttributesFactory af = new AttributesFactory();
@@ -1061,7 +1060,7 @@ public class ResourceManagerWithQueryMonitorDUnitTest extends ClientServerTestCa
protected Properties getServerProperties(boolean disableQueryMonitorForMemory, int queryTimeout) {
Properties p = new Properties();
- p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
return p;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/SelectStarQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/SelectStarQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/SelectStarQueryDUnitTest.java
index 92d8e97..90ee7f7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/SelectStarQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/SelectStarQueryDUnitTest.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.internal.cache.VMCachedDeserializable;
import com.gemstone.gemfire.pdx.PdxInstance;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -119,9 +119,9 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port1);
- cf.addPoolServer(NetworkSupport.getServerHostName(server2.getHost()), port2);
- cf.addPoolServer(NetworkSupport.getServerHostName(server3.getHost()), port3);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server2.getHost()), port2);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server3.getHost()), port3);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -145,7 +145,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -291,7 +291,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port1);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -319,7 +319,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -491,7 +491,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -515,7 +515,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -664,9 +664,9 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port1);
- cf.addPoolServer(NetworkSupport.getServerHostName(server2.getHost()), port2);
- cf.addPoolServer(NetworkSupport.getServerHostName(server3.getHost()), port3);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server2.getHost()), port2);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server3.getHost()), port3);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -690,7 +690,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -820,7 +820,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port1);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -864,7 +864,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -940,7 +940,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -1022,7 +1022,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port1);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -1046,7 +1046,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -1243,7 +1243,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port1);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -1267,7 +1267,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -1470,7 +1470,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port);
+ cf.addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regName);
@@ -1486,7 +1486,7 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
client.invoke(new SerializableCallable("Query") {
@Override
public Object call() throws Exception {
- LogWriterSupport.getLogWriter().info("Querying remotely from client");
+ LogWriterUtils.getLogWriter().info("Querying remotely from client");
QueryService localQS = null;
QueryService remoteQS = null;
try {
@@ -1605,10 +1605,10 @@ public class SelectStarQueryDUnitTest extends CacheTestCase {
public void beforeIterationEvaluation(CompiledValue executer,
Object currentObject) {
if (currentObject instanceof VMCachedDeserializable) {
- LogWriterSupport.getLogWriter().fine("currentObject is serialized object");
+ LogWriterUtils.getLogWriter().fine("currentObject is serialized object");
isObjectSerialized = true;
} else {
- LogWriterSupport.getLogWriter().fine("currentObject is deserialized object");
+ LogWriterUtils.getLogWriter().fine("currentObject is deserialized object");
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/IndexCreationDeadLockJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/IndexCreationDeadLockJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/IndexCreationDeadLockJUnitTest.java
index d3c2925..57846a8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/IndexCreationDeadLockJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/IndexCreationDeadLockJUnitTest.java
@@ -46,7 +46,7 @@ import com.gemstone.gemfire.cache.query.IndexType;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.util.CacheWriterAdapter;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -190,7 +190,7 @@ public class IndexCreationDeadLockJUnitTest
Thread th = new IndexCreationDeadLockJUnitTest.PutThread("put thread");
th.start();
- Threads.join(th, 60 * 1000, null);
+ ThreadUtils.join(th, 60 * 1000);
}
/**
@@ -294,7 +294,7 @@ public class IndexCreationDeadLockJUnitTest
Thread indxCreationThread = new HelperThread("index creator thread");
indxCreationThread.start();
try {
- Threads.join(indxCreationThread, 30 * 1000, null);
+ ThreadUtils.join(indxCreationThread, 30 * 1000);
}
catch (Exception e) {
e.printStackTrace();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/LikePredicateJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/LikePredicateJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/LikePredicateJUnitTest.java
index 97918db..0bfad5e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/LikePredicateJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/functional/LikePredicateJUnitTest.java
@@ -57,7 +57,7 @@ import com.gemstone.gemfire.cache.query.internal.ResultsCollectionWrapper;
import com.gemstone.gemfire.cache.query.internal.index.IndexManager;
import com.gemstone.gemfire.cache.query.internal.index.IndexManager.TestHook;
import com.gemstone.gemfire.cache.query.internal.types.ObjectTypeImpl;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -2054,7 +2054,7 @@ public class LikePredicateJUnitTest {
assertEquals(5, rs[0][0].size());
// wait for remove to complete
- Threads.join(LikeQueryIndexTestHook.th, 60 * 1000, null);
+ ThreadUtils.join(LikeQueryIndexTestHook.th, 60 * 1000);
// The index should have been removed by now
assertEquals(0, cache.getQueryService().getIndexes().size());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/ExecutionContextJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/ExecutionContextJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/ExecutionContextJUnitTest.java
index 4a767c7..41d6d62 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/ExecutionContextJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/ExecutionContextJUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.query.data.Position;
import com.gemstone.gemfire.internal.cache.InternalCache;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -368,7 +368,7 @@ public class ExecutionContextJUnitTest {
for (int i =0; i < th.length ;++i) {
try {
- Threads.join(th[i], 30 * 1000, null);
+ ThreadUtils.join(th[i], 30 * 1000);
}catch(Exception e) {
fail(e.toString());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/AsynchIndexMaintenanceJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/AsynchIndexMaintenanceJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/AsynchIndexMaintenanceJUnitTest.java
index 4de8559..eeaa36c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/AsynchIndexMaintenanceJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/AsynchIndexMaintenanceJUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.cache.query.CacheUtils;
import com.gemstone.gemfire.cache.query.IndexType;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.data.Portfolio;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@@ -248,7 +248,7 @@ public class AsynchIndexMaintenanceJUnitTest {
}
try {
for (int i = 0; i < TOTAL_THREADS; ++i) {
- Threads.join(threads[i], 30 * 1000, null);
+ ThreadUtils.join(threads[i], 30 * 1000);
}
}
catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexInitOnOverflowRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexInitOnOverflowRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexInitOnOverflowRegionDUnitTest.java
index 1a1a1e1..5528299 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexInitOnOverflowRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexInitOnOverflowRegionDUnitTest.java
@@ -47,9 +47,9 @@ import com.gemstone.gemfire.internal.cache.EvictionAttributesImpl;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -116,7 +116,7 @@ public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -184,10 +184,8 @@ public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase
});
// If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30 * 1000, PRQHelp.getCache()
- .getLogger());
- Threads.join(asyncInv1, 30 * 1000, PRQHelp.getCache()
- .getLogger());
+ ThreadUtils.join(asyncInv2, 30 * 1000);
+ ThreadUtils.join(asyncInv1, 30 * 1000);
}
/**
@@ -236,7 +234,7 @@ public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
} catch (IOException e) {
e.printStackTrace();
}
@@ -258,7 +256,7 @@ public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase
final int port = vm0.invokeInt(ConcurrentIndexInitOnOverflowRegionDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(vm0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
// Start changing the value in Region which should turn into a deadlock if
// the fix is not there
@@ -327,10 +325,8 @@ public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase
});
// If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30 * 1000, PRQHelp.getCache()
- .getLogger());
- Threads.join(asyncInv1, 30 * 1000, PRQHelp.getCache()
- .getLogger());
+ ThreadUtils.join(asyncInv2, 30 * 1000);
+ ThreadUtils.join(asyncInv1, 30 * 1000);
vm0.invoke(new CacheSerializableRunnable("Set Test Hook") {
@@ -435,8 +431,7 @@ public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase
});
// Kill asynch thread
- Threads.join(indexUpdateAsysnch, 20000, PRQHelp.getCache()
- .getLogger());
+ ThreadUtils.join(indexUpdateAsysnch, 20000);
//Verify region size which must be 50
vm0.invoke(new CacheSerializableRunnable("Check region size") {
@@ -457,7 +452,7 @@ public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase
switch (spot) {
case 6: // Before Index update and after region entry lock.
hooked = true;
- LogWriterSupport.getLogWriter().fine("IndexManagerTestHook is hooked.");
+ LogWriterUtils.getLogWriter().fine("IndexManagerTestHook is hooked.");
Wait.pause(10000);
hooked = false;
break;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexOperationsOnOverflowRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexOperationsOnOverflowRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexOperationsOnOverflowRegionDUnitTest.java
index 3a70271..81482b4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexOperationsOnOverflowRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexOperationsOnOverflowRegionDUnitTest.java
@@ -46,8 +46,8 @@ import com.gemstone.gemfire.internal.cache.EvictionAttributesImpl;
import com.gemstone.gemfire.internal.cache.PartitionedRegionQueryEvaluator.TestHook;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -117,7 +117,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -182,8 +182,8 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
});
//If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30*1000, PRQHelp.getCache().getLogger());
- Threads.join(asyncInv1, 30*1000, PRQHelp.getCache().getLogger());
+ ThreadUtils.join(asyncInv2, 30*1000);
+ ThreadUtils.join(asyncInv1, 30*1000);
}
/**
@@ -219,7 +219,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -284,8 +284,8 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
});
//If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30*1000, PRQHelp.getCache().getLogger());
- Threads.join(asyncInv1, 30*1000, PRQHelp.getCache().getLogger());
+ ThreadUtils.join(asyncInv2, 30*1000);
+ ThreadUtils.join(asyncInv1, 30*1000);
}
/**
@@ -324,7 +324,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -393,10 +393,8 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
});
// If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30 * 1000, PRQHelp.getCache()
- .getLogger());
- Threads.join(asyncInv1, 30 * 1000, PRQHelp.getCache()
- .getLogger());
+ ThreadUtils.join(asyncInv2, 30 * 1000);
+ ThreadUtils.join(asyncInv1, 30 * 1000);
}
/**
@@ -435,7 +433,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -504,10 +502,8 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
});
// If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30 * 1000, PRQHelp.getCache()
- .getLogger());
- Threads.join(asyncInv1, 30 * 1000, PRQHelp.getCache()
- .getLogger());
+ ThreadUtils.join(asyncInv2, 30 * 1000);
+ ThreadUtils.join(asyncInv1, 30 * 1000);
}
/**
@@ -534,7 +530,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -599,8 +595,8 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
});
//If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30*1000, PRQHelp.getCache().getLogger());
- Threads.join(asyncInv1, 30*1000, PRQHelp.getCache().getLogger());
+ ThreadUtils.join(asyncInv2, 30*1000);
+ ThreadUtils.join(asyncInv1, 30*1000);
}
/**
@@ -627,7 +623,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
RegionFactory regionFactory = cache.createRegionFactory(attr.create());
partitionRegion = regionFactory.create(name);
} catch (IllegalStateException ex) {
- LogWriterSupport.getLogWriter().warning("Creation caught IllegalStateException", ex);
+ LogWriterUtils.getLogWriter().warning("Creation caught IllegalStateException", ex);
}
assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
assertNotNull("Region ref null", partitionRegion);
@@ -692,8 +688,8 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
});
//If we take more than 30 seconds then its a deadlock.
- Threads.join(asyncInv2, 30*1000, PRQHelp.getCache().getLogger());
- Threads.join(asyncInv1, 30*1000, PRQHelp.getCache().getLogger());
+ ThreadUtils.join(asyncInv2, 30*1000);
+ ThreadUtils.join(asyncInv1, 30*1000);
}
public class IndexManagerTestHook implements com.gemstone.gemfire.cache.query.internal.index.IndexManager.TestHook{
@@ -701,7 +697,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
switch (spot) {
case 5: //Before Index update and after region entry lock.
hooked = true;
- LogWriterSupport.getLogWriter().fine("IndexManagerTestHook is hooked.");
+ LogWriterUtils.getLogWriter().fine("IndexManagerTestHook is hooked.");
Wait.pause(10000);
//hooked = false;
break;
@@ -715,7 +711,7 @@ public class ConcurrentIndexOperationsOnOverflowRegionDUnitTest extends
switch (spot) {
case 5: //Before Index update and after region entry lock.
hooked = true;
- LogWriterSupport.getLogWriter().fine("IndexManagerTestHook is hooked.");
+ LogWriterUtils.getLogWriter().fine("IndexManagerTestHook is hooked.");
Wait.pause(100);
// hooked = false;
break;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest.java
index 0b3b194..4c83562 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest.java
@@ -46,10 +46,10 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.SerializableRunnableIF;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
/**
* This test is similar to {@link ConcurrentIndexUpdateWithoutWLDUnitTest} except
@@ -149,7 +149,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
asyncInvs[1] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, stepSize));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 30*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 30*000);
}
for (AsyncInvocation inv : asyncInvs) {
@@ -194,7 +194,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
asyncInvs[1] = vm0.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, 0, totalDataSize));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 30*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 30*000);
}
for (AsyncInvocation inv : asyncInvs) {
if (inv.exceptionOccurred()) {
@@ -252,7 +252,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize ));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 60*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 60*000);
}
for (AsyncInvocation inv : asyncInvs) {
@@ -315,7 +315,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
asyncInvs[11] = vm3.invokeAsync(helper.getCacheSerializableRunnableForPRRandomOps(regionName, (3 * (stepSize)), totalDataSize ));
for (AsyncInvocation inv : asyncInvs) {
- Threads.join(inv, 60*000, helper.getCache().getLogger());
+ ThreadUtils.join(inv, 60*000);
}
for (AsyncInvocation inv : asyncInvs) {
if (inv.exceptionOccurred()) {
@@ -394,7 +394,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
if (index instanceof CompactRangeIndex) {
// Ignore invalid values.
if (value != Token.INVALID && value != Token.TOMBSTONE) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
Integer ID = ((Portfolio) value).getID();
assertTrue("Did not find index key for REgionEntry [key: "
@@ -432,7 +432,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
expectedNullEntries++;
}
} else {
- LogWriterSupport.getLogWriter().info(internalEntry.getKey()+"");
+ LogWriterUtils.getLogWriter().info(internalEntry.getKey()+"");
expectedUndefinedEntries++;
}
}
@@ -444,7 +444,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
Collection<Position> positions = ((Portfolio)value).positions.values();
for (Position pos : positions) {
if (pos != null) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
String secId = pos.secId;
assertTrue("Did not find index key for REgionEntry [key: "
+ internalEntry.getKey() + " , value: " + value
@@ -527,21 +527,21 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
.toArray()) {
getLogWriter().info(((RegionEntry) obj).getKey() + "");
}
-*/ LogWriterSupport.getLogWriter().info(
+*/ LogWriterUtils.getLogWriter().info(
" Expected Size of Index is: " + expectedIndexSize
+ " Undefined size is: " + expectedUndefinedEntries
+ " And NULL size is: " + expectedNullEntries);
assertEquals("No of index keys NOT equals the no shown in statistics for index:" + index.getName(), ((CompactRangeIndex) index).getIndexStorage().size(), stats.getNumberOfKeys());
} else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" Actual Size of Index is: " + actualSize + " Undefined size is: "
+ ((RangeIndex) index).undefinedMappedEntries.getNumEntries()
+ " And NULL size is: "
+ ((RangeIndex) index).nullMappedEntries.getNumEntries());
for (Object obj : ((RangeIndex) index).undefinedMappedEntries.map.keySet()) {
- LogWriterSupport.getLogWriter().info(((RegionEntry) obj).getKey() + "");
+ LogWriterUtils.getLogWriter().info(((RegionEntry) obj).getKey() + "");
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" Expected Size of Index is: " + expectedIndexSize
+ " Undefined size is: " + expectedUndefinedEntries
+ " And NULL size is: " + expectedNullEntries);
@@ -587,7 +587,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
if (index instanceof CompactRangeIndex) {
// Ignore invalid values.
if (value != Token.INVALID && value != Token.TOMBSTONE) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value));
Integer ID = ((Portfolio) value).getID();
assertTrue("Did not find index key for REgionEntry [key: "
@@ -635,7 +635,7 @@ public class ConcurrentIndexUpdateWithInplaceObjectModFalseDUnitTest extends
Collection<Position> positions = ((Portfolio)value).positions.values();
for (Position pos : positions) {
if (pos != null) {
- LogWriterSupport.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
+ LogWriterUtils.getLogWriter().info("Portfolio: "+ ((Portfolio)value) + "Position: " + pos);
String secId = pos.secId;
assertTrue("Did not find index key for REgionEntry [key: "
+ internalEntry.getKey() + " , value: " + value
[16/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36269DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36269DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36269DUnitTest.java
index d530a3b..94ffa6f 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36269DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36269DUnitTest.java
@@ -37,7 +37,7 @@ import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -108,7 +108,7 @@ public class Bug36269DUnitTest extends DistributedTestCase
{
try {
createClientCache();
- acquireConnectionsAndDestroyRegion(NetworkSupport.getServerHostName(Host.getHost(0)));
+ acquireConnectionsAndDestroyRegion(NetworkUtils.getServerHostName(Host.getHost(0)));
server1.invoke(Bug36269DUnitTest.class, "verifyRegionDestroy");
server2.invoke(Bug36269DUnitTest.class, "verifyRegionDestroy");
Wait.pause(5000);
@@ -141,7 +141,7 @@ public class Bug36269DUnitTest extends DistributedTestCase
new Bug36269DUnitTest("temp").createCache(props);
CacheServerTestUtil.disableShufflingOfEndpoints();
PoolImpl p;
- String host = NetworkSupport.getServerHostName(Host.getHost(0));
+ String host = NetworkUtils.getServerHostName(Host.getHost(0));
try {
p = (PoolImpl)PoolManager.createFactory()
.addServer(host, PORT1)
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36457DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36457DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36457DUnitTest.java
index 519975e..22a4eae 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36457DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36457DUnitTest.java
@@ -40,8 +40,8 @@ import com.gemstone.gemfire.internal.cache.ClientServerObserverHolder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -165,9 +165,9 @@ public class Bug36457DUnitTest extends DistributedTestCase
Integer port2 = ((Integer)server2.invoke(Bug36457DUnitTest.class,
"createServerCache"));
client1.invoke(Bug36457DUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
client2.invoke(Bug36457DUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
//set a cllabck so that we come to know that whether a failover is called or not
// if failover is called means this bug is present.
client2.invoke(Bug36457DUnitTest.class, "setClientServerObserver");
@@ -191,7 +191,7 @@ public class Bug36457DUnitTest extends DistributedTestCase
.setInstance(new ClientServerObserverAdapter() {
public void afterPrimaryIdentificationFromBackup(ServerLocation primaryEndpoint)
{
- LogWriterSupport.getLogWriter().fine("TEST FAILED HERE YOGI ");
+ LogWriterUtils.getLogWriter().fine("TEST FAILED HERE YOGI ");
isFaileoverHappened = true;
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36805DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36805DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36805DUnitTest.java
index bc61ccc..236c8fa 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36805DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36805DUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -159,9 +159,9 @@ public class Bug36805DUnitTest extends DistributedTestCase
Integer port2 = ((Integer)server2.invoke(Bug36805DUnitTest.class,
"createServerCache"));
client1.invoke(Bug36805DUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
client2.invoke(Bug36805DUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
// set a cllabck so that we come to know that whether a failover is called
// or not
// if failover is called means this bug is present.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36829DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36829DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36829DUnitTest.java
index b50942c..96bca47 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36829DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36829DUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
public class Bug36829DUnitTest extends DistributedTestCase {
@@ -68,7 +68,7 @@ public class Bug36829DUnitTest extends DistributedTestCase {
this.ClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(ClientVM.getHost()), PORT, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(ClientVM.getHost()), PORT, true, 0),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout), Boolean.TRUE });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36995DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36995DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36995DUnitTest.java
index 7b4b626..d9ed901 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36995DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug36995DUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -168,7 +168,7 @@ public class Bug36995DUnitTest extends DistributedTestCase
Integer port3 = ((Integer)server3.invoke(Bug36995DUnitTest.class,
"createServerCache"));
createClientCacheWithDefaultMessageTrackingTimeout(
- NetworkSupport.getServerHostName(server1.getHost()), port1.intValue(), port2
+ NetworkUtils.getServerHostName(server1.getHost()), port1.intValue(), port2
.intValue(), port3.intValue());
assertEquals(PoolFactory.DEFAULT_SUBSCRIPTION_MESSAGE_TRACKING_TIMEOUT,
pool.getSubscriptionMessageTrackingTimeout());
@@ -187,7 +187,7 @@ public class Bug36995DUnitTest extends DistributedTestCase
"createServerCache"));
Integer port3 = ((Integer)server3.invoke(Bug36995DUnitTest.class,
"createServerCache"));
- createClientCache(NetworkSupport.getServerHostName(server1.getHost()),
+ createClientCache(NetworkUtils.getServerHostName(server1.getHost()),
port1.intValue(), port2.intValue(), port3.intValue());
assertEquals(54321, pool.getSubscriptionMessageTrackingTimeout());
}
@@ -203,7 +203,7 @@ public class Bug36995DUnitTest extends DistributedTestCase
"createServerCache"));
Integer port3 = ((Integer)server3.invoke(Bug36995DUnitTest.class,
"createServerCache"));
- createClientCache(NetworkSupport.getServerHostName(server1.getHost()),
+ createClientCache(NetworkUtils.getServerHostName(server1.getHost()),
port1.intValue(), port2.intValue(), port3.intValue());
verifyDeadAndLiveServers(0, 3);
server2.invoke(Bug36995DUnitTest.class, "stopServer");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37210DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37210DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37210DUnitTest.java
index fb9aa10..eddbda5 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37210DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37210DUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.internal.cache.ha.HARegionQueue;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.cache.client.*;
@@ -147,11 +147,11 @@ public class Bug37210DUnitTest extends DistributedTestCase
*/
public void testHAStatsCleanup() throws Exception
{
- LogWriterSupport.getLogWriter().info("testHAStatsCleanup : BEGIN");
+ LogWriterUtils.getLogWriter().info("testHAStatsCleanup : BEGIN");
IgnoredException.addIgnoredException("java.net.SocketException");
IgnoredException.addIgnoredException("Unexpected IOException");
client.invoke(Bug37210DUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT) });
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT) });
server.invoke(Bug37210DUnitTest.class, "doEntryOperations");
server.invoke(Bug37210DUnitTest.class,
@@ -161,7 +161,7 @@ public class Bug37210DUnitTest extends DistributedTestCase
Thread.currentThread().sleep(1000);
server.invoke(Bug37210DUnitTest.class,
"closeCacheClientProxyAndVerifyStats2");
- LogWriterSupport.getLogWriter().info("testHAStatsCleanup : END");
+ LogWriterUtils.getLogWriter().info("testHAStatsCleanup : END");
}
/**
@@ -190,7 +190,7 @@ public class Bug37210DUnitTest extends DistributedTestCase
server.setSocketBufferSize(32768);
server.setMaximumTimeBetweenPings(1000000);
server.start();
- LogWriterSupport.getLogWriter().info("Server started at PORT = " + port);
+ LogWriterUtils.getLogWriter().info("Server started at PORT = " + port);
return new Integer(server.getPort());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37805DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37805DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37805DUnitTest.java
index 68b5166..b135e61 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37805DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/Bug37805DUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.internal.cache.HARegion;
import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -85,7 +85,7 @@ public class Bug37805DUnitTest extends DistributedTestCase{
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
regionName,
getDurableClientDistributedSystemProperties(durableClientId,
durableClientTimeout), Boolean.TRUE });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTestUtil.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTestUtil.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTestUtil.java
index 78050cc..4a3cf5f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTestUtil.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTestUtil.java
@@ -52,9 +52,9 @@ import com.gemstone.gemfire.internal.cache.PoolFactoryImpl.PoolAttributes;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
/**
*
* @author Yogesh Mahajan
@@ -226,7 +226,7 @@ public class CacheServerTestUtil extends DistributedTestCase
ccf.set(DistributionConfig.DURABLE_CLIENT_ID_NAME, durableClientId);
ccf.set(DistributionConfig.DURABLE_CLIENT_TIMEOUT_NAME, String.valueOf(timeout));
ccf.set("log-file", "abs_client_system.log");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
cache = (Cache)ccf.create();
expected = IgnoredException.addIgnoredException("java.net.ConnectionException||java.net.SocketException");
pool = (PoolImpl)PoolManager.find(poolName);
@@ -258,9 +258,9 @@ public class CacheServerTestUtil extends DistributedTestCase
File cacheXmlFile = new File(url.toURI().getPath());
ccf.set("cache-xml-file", cacheXmlFile.toURI().getPath());
ccf.set("mcast-port", "0");
- ccf.set("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ ccf.set("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
ccf.set("log-file", "abs_server_system.log");
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
}
catch (URISyntaxException e) {
throw new ExceptionInInitializerError(e);
@@ -275,7 +275,7 @@ public class CacheServerTestUtil extends DistributedTestCase
File cacheXmlFile = new File(url.toURI().getPath());
ccf.set("cache-xml-file", cacheXmlFile.toURI().getPath());
ccf.set("mcast-port", "0");
- ccf.set("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ ccf.set("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
}
catch (URISyntaxException e) {
throw new ExceptionInInitializerError(e);
@@ -330,7 +330,7 @@ public class CacheServerTestUtil extends DistributedTestCase
{
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
- props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
new CacheServerTestUtil("temp").createCache(props);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
@@ -352,7 +352,7 @@ public class CacheServerTestUtil extends DistributedTestCase
Properties props = new Properties();
// int mcastPort = AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
- props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
new CacheServerTestUtil("temp").createCache(props);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
@@ -373,7 +373,7 @@ public class CacheServerTestUtil extends DistributedTestCase
throws Exception {
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
- props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
new CacheServerTestUtil("temp").createCache(props);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTransactionsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTransactionsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTransactionsDUnitTest.java
index 182c8df..18811ee 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTransactionsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTransactionsDUnitTest.java
@@ -39,8 +39,8 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -116,9 +116,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
{
Integer port1 = initServerCache(server1);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -153,9 +153,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
Integer port1 = initServerCache(server1);
Integer port2 = initServerCache(server2);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -191,9 +191,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
Integer port1 = initServerCache(server1);
Integer port2 = initServerCache(server2);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port2 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -227,9 +227,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
{
Integer port1 = initServerCache(server1);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -256,9 +256,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
Integer port1 = initServerCache(server1);
Integer port2 = initServerCache(server2);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -288,9 +288,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
Integer port1 = initServerCache(server1);
Integer port2 = initServerCache(server2);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port2 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -320,9 +320,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
{
Integer port1 = initServerCache(server1);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -349,9 +349,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
Integer port1 = initServerCache(server1);
Integer port2 = initServerCache(server2);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -381,9 +381,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
Integer port1 = initServerCache(server1);
Integer port2 = initServerCache(server2);
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port2 });
Wait.pause(PAUSE);
server1.invoke(resetFlags());
@@ -414,9 +414,9 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
Integer port2 = ((Integer)server2.invoke(
CacheServerTransactionsDUnitTest.class, "createServerCache"));
client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1 });
client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port2 });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port2 });
client1.invoke(CacheServerTransactionsDUnitTest.class, "commitTransactionOnClient");
Wait.pause(PAUSE);
@@ -460,7 +460,7 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
final Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
assertNotNull(r1);
try {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"vlaue for the key k1" + r1.getEntry(k1).getValue());
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
@@ -570,7 +570,7 @@ public class CacheServerTransactionsDUnitTest extends DistributedTestCase
final Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
assertNotNull(r1);
try {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"vlaue for the key k1" + r1.getEntry(k1).getValue());
// wait until
// condition is
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClearPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClearPropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClearPropagationDUnitTest.java
index 3441259..626d9e2 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClearPropagationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClearPropagationDUnitTest.java
@@ -39,8 +39,8 @@ import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.cache.client.*;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
@@ -106,9 +106,9 @@ public class ClearPropagationDUnitTest extends DistributedTestCase
"createServerCache")).intValue();
client1.invoke(ClearPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1), new Integer(PORT2) });
client2.invoke(ClearPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1), new Integer(PORT2) });
CacheObserverHolder.setInstance(new CacheObserverAdapter());
@@ -163,7 +163,7 @@ public class ClearPropagationDUnitTest extends DistributedTestCase
client1.invoke(ClearPropagationDUnitTest.class,
"acquireConnectionsAndClear",
- new Object[] { NetworkSupport.getServerHostName(client1.getHost())});
+ new Object[] { NetworkUtils.getServerHostName(client1.getHost())});
client1.invoke(checkSizeRegion(2, false/*Do not Block*/));
client2.invoke(checkSizeRegion(0, true /* block*/));
@@ -214,7 +214,7 @@ public class ClearPropagationDUnitTest extends DistributedTestCase
client1.invoke(ClearPropagationDUnitTest.class,
"acquireConnectionsAndDestroyRegion",
- new Object[] { NetworkSupport.getServerHostName(client1.getHost())});
+ new Object[] { NetworkUtils.getServerHostName(client1.getHost())});
client1.invoke(checkSizeRegion(2, false/*Do not Block*/));
client2.invoke(checkDestroyRegion(true /* block*/));
@@ -266,7 +266,7 @@ public class ClearPropagationDUnitTest extends DistributedTestCase
{
Region region = cache.getRegion(Region.SEPARATOR + REGION_NAME);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info("Size of the region " + region.size());
+ LogWriterUtils.getLogWriter().info("Size of the region " + region.size());
if (toBlock) {
synchronized (ClearPropagationDUnitTest.class) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientConflationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientConflationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientConflationDUnitTest.java
index 479d0c6..2cf340e 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientConflationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientConflationDUnitTest.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.internal.cache.ClientServerObserverHolder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -133,8 +133,8 @@ public class ClientConflationDUnitTest extends DistributedTestCase
}
private void performSteps(String conflation) throws Exception {
- createClientCacheFeeder(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT));
- vm1.invoke(ClientConflationDUnitTest.class, "createClientCache", new Object[] { NetworkSupport.getServerHostName(vm1.getHost()), new Integer(PORT),
+ createClientCacheFeeder(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT));
+ vm1.invoke(ClientConflationDUnitTest.class, "createClientCache", new Object[] { NetworkUtils.getServerHostName(vm1.getHost()), new Integer(PORT),
conflation});
vm1.invoke(ClientConflationDUnitTest.class, "setClientServerObserverForBeforeInterestRecovery");
vm1.invoke(ClientConflationDUnitTest.class, "setAllCountersZero");
@@ -505,7 +505,7 @@ public class ClientConflationDUnitTest extends DistributedTestCase
public static void putEntries()
{
try {
- LogWriterSupport.getLogWriter().info("Putting entries...");
+ LogWriterUtils.getLogWriter().info("Putting entries...");
Region r1 = cacheFeeder.getRegion(Region.SEPARATOR +REGION_NAME1);
Region r2 = cacheFeeder.getRegion(Region.SEPARATOR +REGION_NAME2);
r1.put("key-1", "11");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientInterestNotifyDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientInterestNotifyDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientInterestNotifyDUnitTest.java
index 146ee3b..404d47f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientInterestNotifyDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientInterestNotifyDUnitTest.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -186,14 +186,14 @@ public class ClientInterestNotifyDUnitTest extends DistributedTestCase
// Create a feeder.
vm0.invoke(ClientInterestNotifyDUnitTest.class, "createClientCacheFeeder",
- new Object[] {NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT)});
+ new Object[] {NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT)});
// Client 1 overrides NBS to true.
// Client 2 "overrides" NSB to false.
// Client 3 uses the default NBS which is false on the server.
vm1.invoke(ClientInterestNotifyDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT), "ClientOn"});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT), "ClientOn"});
/*
vm2.invoke(ClientInterestNotifyDUnitTest.class, "createClientCache",
new Object[] { getServerHostName(Host.getHost(0)), new Integer(PORT),
@@ -572,7 +572,7 @@ public class ClientInterestNotifyDUnitTest extends DistributedTestCase
public static void doEntryOps()
{
try {
- LogWriterSupport.getLogWriter().info("Putting entries...");
+ LogWriterUtils.getLogWriter().info("Putting entries...");
Cache cacheClient = GemFireCacheImpl.getInstance();
Region r1 = cacheClient.getRegion(Region.SEPARATOR +REGION_NAME1);
Region r2 = cacheClient.getRegion(Region.SEPARATOR +REGION_NAME2);
@@ -602,7 +602,7 @@ public class ClientInterestNotifyDUnitTest extends DistributedTestCase
public static void doFeed()
{
try {
- LogWriterSupport.getLogWriter().info("Putting entries...");
+ LogWriterUtils.getLogWriter().info("Putting entries...");
Cache cacheClient = GemFireCacheImpl.getInstance();
Region r1 = cacheClient.getRegion(Region.SEPARATOR +REGION_NAME1);
Region r2 = cacheClient.getRegion(Region.SEPARATOR +REGION_NAME2);
@@ -623,7 +623,7 @@ public class ClientInterestNotifyDUnitTest extends DistributedTestCase
public static void getEntries()
{
try {
- LogWriterSupport.getLogWriter().info("Getting entries...");
+ LogWriterUtils.getLogWriter().info("Getting entries...");
Cache cacheClient = GemFireCacheImpl.getInstance();
Region r3 = cacheClient.getRegion(Region.SEPARATOR +REGION_NAME3);
r3.get("key-1");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientServerMiscDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientServerMiscDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientServerMiscDUnitTest.java
index c0300f2..0f40428 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientServerMiscDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientServerMiscDUnitTest.java
@@ -44,8 +44,8 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -147,13 +147,13 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
public void testConcurrentOperationsWithDRandPR() throws Exception {
int port1 = initServerCache(true); // vm0
int port2 = initServerCache2(true); // vm1
- String serverName = NetworkSupport.getServerHostName(Host.getHost(0));
+ String serverName = NetworkUtils.getServerHostName(Host.getHost(0));
host.getVM(2).invoke(this.getClass(), "createClientCacheV", new Object[]{serverName, port1});
host.getVM(3).invoke(this.getClass(), "createClientCacheV", new Object[]{serverName, port2});
- LogWriterSupport.getLogWriter().info("Testing concurrent map operations from a client with a distributed region");
+ LogWriterUtils.getLogWriter().info("Testing concurrent map operations from a client with a distributed region");
concurrentMapTest(host.getVM(2), "/" + REGION_NAME1);
// TODO add verification in vm3
- LogWriterSupport.getLogWriter().info("Testing concurrent map operations from a client with a partitioned region");
+ LogWriterUtils.getLogWriter().info("Testing concurrent map operations from a client with a partitioned region");
concurrentMapTest(host.getVM(2), "/" + PR_REGION_NAME);
// TODO add verification in vm3
}
@@ -161,13 +161,13 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
public void testConcurrentOperationsWithDRandPRandEmptyClient() throws Exception {
int port1 = initServerCache(true); // vm0
int port2 = initServerCache2(true); // vm1
- String serverName = NetworkSupport.getServerHostName(Host.getHost(0));
+ String serverName = NetworkUtils.getServerHostName(Host.getHost(0));
host.getVM(2).invoke(this.getClass(), "createEmptyClientCache", new Object[]{serverName, port1});
host.getVM(3).invoke(this.getClass(), "createClientCacheV", new Object[]{serverName, port2});
- LogWriterSupport.getLogWriter().info("Testing concurrent map operations from a client with a distributed region");
+ LogWriterUtils.getLogWriter().info("Testing concurrent map operations from a client with a distributed region");
concurrentMapTest(host.getVM(2), "/" + REGION_NAME1);
// TODO add verification in vm3
- LogWriterSupport.getLogWriter().info("Testing concurrent map operations from a client with a partitioned region");
+ LogWriterUtils.getLogWriter().info("Testing concurrent map operations from a client with a partitioned region");
concurrentMapTest(host.getVM(2), "/" + PR_REGION_NAME);
// TODO add verification in vm3
}
@@ -379,7 +379,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
{
// start server first
PORT1 = initServerCache(true);
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1);
populateCache();
registerInterest();
server1.invoke(ClientServerMiscDUnitTest.class, "put");
@@ -412,7 +412,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
{
// start server first
PORT1 = initServerCache(true);
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1);
populateCache();
registerInterestInBothTheRegions();
closeRegion1();
@@ -435,7 +435,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
{
// start server first
PORT1 = initServerCache(true);
- pool = (PoolImpl)createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)),PORT1);
+ pool = (PoolImpl)createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)),PORT1);
populateCache();
registerInterestInBothTheRegions();
closeBothRegions();
@@ -462,7 +462,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
public void testCCPDestroyOnLastDestroyRegion() throws Exception
{
PORT1 = initServerCache(true);
- PoolImpl pool = (PoolImpl)createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)),PORT1);
+ PoolImpl pool = (PoolImpl)createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)),PORT1);
destroyRegion1();
// pause(5000);
server1.invoke(ClientServerMiscDUnitTest.class,
@@ -499,7 +499,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
{
// start server first
PORT1 = initServerCache(false);
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1);
registerInterestForInvalidatesInBothTheRegions();
populateCache();
server1.invoke(ClientServerMiscDUnitTest.class, "put");
@@ -520,7 +520,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
{
// start server first
PORT1 = initServerCache(false);
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1);
registerInterestForInvalidatesInBothTheRegions();
Region region = static_cache.getRegion(REGION_NAME1);
populateCache();
@@ -566,7 +566,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, "");
new ClientServerMiscDUnitTest("temp").createCache(props);
- String host = NetworkSupport.getServerHostName(server1.getHost());
+ String host = NetworkUtils.getServerHostName(server1.getHost());
PoolImpl p = (PoolImpl)PoolManager.createFactory()
.addServer(host, PORT1)
.setSubscriptionEnabled(true)
@@ -669,7 +669,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
ds.disconnect();
ds = getSystem(props);
PORT1 = initServerCache(true);
- String host = NetworkSupport.getServerHostName(server1.getHost());
+ String host = NetworkUtils.getServerHostName(server1.getHost());
Pool p = PoolManager.createFactory()
.addServer(host, PORT1)
.setSubscriptionEnabled(true)
@@ -720,7 +720,7 @@ public class ClientServerMiscDUnitTest extends CacheTestCase
assertNotNull(ds);
PORT1 = initServerCache(true);
- String host = NetworkSupport.getServerHostName(server1.getHost());
+ String host = NetworkUtils.getServerHostName(server1.getHost());
Pool p = PoolManager.createFactory()
.addServer(host, PORT1)
.setSubscriptionEnabled(true)
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ConflationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ConflationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ConflationDUnitTest.java
index b6a64a3..26411b4 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ConflationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ConflationDUnitTest.java
@@ -43,8 +43,8 @@ import com.gemstone.gemfire.internal.cache.ha.HARegionQueue;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -141,9 +141,9 @@ public class ConflationDUnitTest extends DistributedTestCase
{
try {
vm0.invoke(ConflationDUnitTest.class, "setIsSlowStart");
- createClientCache1UniqueWriter ( NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT));
+ createClientCache1UniqueWriter ( NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT));
vm2.invoke(ConflationDUnitTest.class, "createClientCache2UniqueWriter",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT)});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT)});
vm2.invoke(ConflationDUnitTest.class, "setClientServerObserverForBeforeInterestRecovery");
vm2.invoke(ConflationDUnitTest.class, "setAllCountersZero");
vm2.invoke(ConflationDUnitTest.class, "assertAllCountersZero");
@@ -172,9 +172,9 @@ public class ConflationDUnitTest extends DistributedTestCase
{
try {
vm0.invoke(ConflationDUnitTest.class, "setIsSlowStart");
- createClientCache1CommonWriter( NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT));
+ createClientCache1CommonWriter( NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT));
vm2.invoke(ConflationDUnitTest.class, "createClientCache2CommonWriter",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT)});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT)});
vm2.invoke(ConflationDUnitTest.class, "setClientServerObserverForBeforeInterestRecovery");
vm2.invoke(ConflationDUnitTest.class, "setAllCountersZero");
vm2.invoke(ConflationDUnitTest.class, "assertAllCountersZero");
@@ -204,10 +204,10 @@ public class ConflationDUnitTest extends DistributedTestCase
{
try {
vm0.invoke(ConflationDUnitTest.class, "setIsSlowStart");
- createClientCache1CommonWriterTest3(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT));
+ createClientCache1CommonWriterTest3(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT));
vm2.invoke(ConflationDUnitTest.class,
"createClientCache2CommonWriterTest3", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT) });
+ NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT) });
vm2.invoke(ConflationDUnitTest.class, "setClientServerObserverForBeforeInterestRecovery");
vm2.invoke(ConflationDUnitTest.class, "setAllCountersZero");
vm2.invoke(ConflationDUnitTest.class, "assertAllCountersZero");
@@ -306,7 +306,7 @@ public class ConflationDUnitTest extends DistributedTestCase
factory.setPoolName(createPool(host,"p1", port, true).getName());
factory.addCacheListener(new CacheListenerAdapter() {
public void afterCreate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("Listener received event " + event);
+ LogWriterUtils.getLogWriter().info("Listener received event " + event);
String val = (String) event.getNewValue();
synchronized (ConflationDUnitTest.class) {
if (val.equals(MARKER)) {
@@ -322,7 +322,7 @@ public class ConflationDUnitTest extends DistributedTestCase
}
public void afterUpdate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("Listener received event " + event);
+ LogWriterUtils.getLogWriter().info("Listener received event " + event);
synchronized (this) {
counterUpdate++;
}
@@ -330,7 +330,7 @@ public class ConflationDUnitTest extends DistributedTestCase
public void afterDestroy(EntryEvent event)
{
- LogWriterSupport.getLogWriter().info("Listener received event " + event);
+ LogWriterUtils.getLogWriter().info("Listener received event " + event);
synchronized (this) {
if(!event.getKey().equals(MARKER)) {
counterDestroy++;
@@ -353,7 +353,7 @@ public class ConflationDUnitTest extends DistributedTestCase
factory.setPoolName(createPool(host,"p1", port, true).getName());
factory.addCacheListener(new CacheListenerAdapter() {
public void afterCreate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("Listener received event " + event);
+ LogWriterUtils.getLogWriter().info("Listener received event " + event);
String val = (String)event.getNewValue();
synchronized (ConflationDUnitTest.class) {
if (val.equals(MARKER)) {
@@ -369,14 +369,14 @@ public class ConflationDUnitTest extends DistributedTestCase
}
public void afterUpdate(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("Listener received event " + event);
+ LogWriterUtils.getLogWriter().info("Listener received event " + event);
synchronized (this) {
counterUpdate++;
}
}
public void afterDestroy(EntryEvent event) {
- LogWriterSupport.getLogWriter().info("Listener received event " + event);
+ LogWriterUtils.getLogWriter().info("Listener received event " + event);
synchronized (this) {
if (!event.getKey().equals(MARKER)) {
counterDestroy++;
@@ -426,7 +426,7 @@ public class ConflationDUnitTest extends DistributedTestCase
public void afterCreate(EntryEvent event)
{
String val = (String) event.getNewValue();
- LogWriterSupport.getLogWriter().info("Listener received event " + event);
+ LogWriterUtils.getLogWriter().info("Listener received event " + event);
synchronized (ConflationDUnitTest.class) {
if (val.equals(MARKER)) {
count++;
@@ -820,7 +820,7 @@ public class ConflationDUnitTest extends DistributedTestCase
HARegionQueue haRegionQueue = HAHelper.getRegionQueue(region);
statMap.put("eventsConflated", new Long(HAHelper.getRegionQueueStats(
haRegionQueue).getEventsConflated()));
- LogWriterSupport.getLogWriter().info("new Stats Map : " + statMap.toString());
+ LogWriterUtils.getLogWriter().info("new Stats Map : " + statMap.toString());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DataSerializerPropogationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DataSerializerPropogationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DataSerializerPropogationDUnitTest.java
index 51d7c9b..27359a6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DataSerializerPropogationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DataSerializerPropogationDUnitTest.java
@@ -49,9 +49,9 @@ import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.StoppableWaitCriterion;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
@@ -174,7 +174,8 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
}
finally {
- DistributedTestSupport.unregisterAllDataSerializersFromAllVms();
+ DataSerializerPropogationDUnitTest.successfullyLoadedTestDataSerializer = false;
+ DistributedTestUtils.unregisterAllDataSerializersFromAllVms();
}
}
@@ -415,7 +416,7 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
// wait for client2 to come online
Wait.pause(3000);
@@ -473,10 +474,10 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2) });
// wait for client2 to come online
Wait.pause(2000);
@@ -505,10 +506,10 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2) });
// wait for client2 to come online
Wait.pause(2000);
@@ -537,10 +538,10 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2) });
// wait for client2 to come online
Wait.pause(2000);
@@ -594,10 +595,10 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2) });
// wait for client2 to come online
Wait.pause(2000);
@@ -627,10 +628,10 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
PORT2 = initServerCache(server2);
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2) });
client1.invoke(DataSerializerPropogationDUnitTest.class,
"registerDSObject7");
@@ -697,11 +698,11 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
- createClientCache(NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2));
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ createClientCache(NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2));
// wait for client2 to come online
Wait.pause(2000);
@@ -735,12 +736,12 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
PORT1 = initServerCache(server1, 1);
PORT2 = initServerCache(server2, 2);
- createClientCache(NetworkSupport.getServerHostName(server1.getHost()),
+ createClientCache(NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1));
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2) });
setClientServerObserver1();
client2
.invoke(DataSerializerPropogationDUnitTest.class, "setClientServerObserver2");
@@ -763,10 +764,10 @@ public class DataSerializerPropogationDUnitTest extends DistributedTestCase {
client1.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(DataSerializerPropogationDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT2) });
// wait for client2 to come online
Wait.pause(2000);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DestroyEntryPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DestroyEntryPropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DestroyEntryPropagationDUnitTest.java
index fbe87c8..dfe2fe3 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DestroyEntryPropagationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DestroyEntryPropagationDUnitTest.java
@@ -41,8 +41,8 @@ import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -105,9 +105,9 @@ public class DestroyEntryPropagationDUnitTest extends DistributedTestCase
PORT2 = ((Integer)vm1.invoke(DestroyEntryPropagationDUnitTest.class, "createServerCache" )).intValue();
vm2.invoke(DestroyEntryPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1),new Integer(PORT2)});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1),new Integer(PORT2)});
vm3.invoke(DestroyEntryPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1),new Integer(PORT2)});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1),new Integer(PORT2)});
}
@@ -301,10 +301,10 @@ public class DestroyEntryPropagationDUnitTest extends DistributedTestCase
{
try {
Iterator iter = cache.getCacheServers().iterator();
- LogWriterSupport.getLogWriter().fine ("Asif: servers running = "+cache.getCacheServers().size());
+ LogWriterUtils.getLogWriter().fine ("Asif: servers running = "+cache.getCacheServers().size());
if (iter.hasNext()) {
CacheServer server = (CacheServer)iter.next();
- LogWriterSupport.getLogWriter().fine("asif : server running on port="+server.getPort()+ " asked to kill serevre onport="+port);
+ LogWriterUtils.getLogWriter().fine("asif : server running on port="+server.getPort()+ " asked to kill serevre onport="+port);
if(port.intValue() == server.getPort()){
server.stop();
}
@@ -452,7 +452,7 @@ public class DestroyEntryPropagationDUnitTest extends DistributedTestCase
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setPoolName(p.getName());
- factory.setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ factory.setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
RegionAttributes attrs = factory.create();
cache.createRegion(REGION_NAME, attrs);
@@ -464,7 +464,7 @@ public class DestroyEntryPropagationDUnitTest extends DistributedTestCase
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
- factory.setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ factory.setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
RegionAttributes attrs = factory.create();
cache.createRegion(REGION_NAME, attrs);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientBug39997DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientBug39997DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientBug39997DUnitTest.java
index 94fe536..fb8fb3e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientBug39997DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientBug39997DUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -51,7 +51,7 @@ public class DurableClientBug39997DUnitTest extends CacheTestCase {
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- final String hostName = NetworkSupport.getServerHostName(host);
+ final String hostName = NetworkUtils.getServerHostName(host);
final int port = AvailablePortHelper.getRandomAvailableTCPPort();
vm0.invoke(new SerializableRunnable("create cache") {
public void run() {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientQueueSizeDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientQueueSizeDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientQueueSizeDUnitTest.java
index 75beaa4..b00c536 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientQueueSizeDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientQueueSizeDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.VM;
@@ -277,7 +277,7 @@ public class DurableClientQueueSizeDUnitTest extends DistributedTestCase {
public static Integer createCacheServer(Integer serverPort)
throws Exception {
Properties props = new Properties();
- props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
// props.setProperty("log-level", "fine");
// props.setProperty("log-file", "server_" + OSProcess.getId() + ".log");
// props.setProperty("statistic-archive-file", "server_" + OSProcess.getId()
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectAutoDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectAutoDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectAutoDUnitTest.java
index 664d5ca..f9e7d87 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectAutoDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectAutoDUnitTest.java
@@ -19,9 +19,9 @@ package com.gemstone.gemfire.internal.cache.tier.sockets;
import com.gemstone.gemfire.cache.client.PoolFactory;
import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
/**
* @author dsmith
@@ -52,7 +52,7 @@ public class DurableClientReconnectAutoDUnitTest extends
protected PoolFactory getPoolFactory() {
Host host = Host.getHost(0);
PoolFactory factory = PoolManager.createFactory()
- .addLocator(NetworkSupport.getServerHostName(host), DistributedTestSupport.getDUnitLocatorPort());
+ .addLocator(NetworkUtils.getServerHostName(host), DistributedTestUtils.getDUnitLocatorPort());
return factory;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectDUnitTest.java
index 45c74d2..f642733 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientReconnectDUnitTest.java
@@ -46,8 +46,8 @@ import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -110,10 +110,10 @@ public class DurableClientReconnectDUnitTest extends DistributedTestCase
PORT2 = ((Integer) server2.invoke(DurableClientReconnectDUnitTest.class, "createServerCache"));
PORT3 = ((Integer) server3.invoke(DurableClientReconnectDUnitTest.class, "createServerCache"));
PORT4 = ((Integer) server4.invoke(DurableClientReconnectDUnitTest.class, "createServerCache"));
- SERVER1 = NetworkSupport.getServerHostName(host)+PORT1;
- SERVER2 = NetworkSupport.getServerHostName(host)+PORT2;
- SERVER3 = NetworkSupport.getServerHostName(host)+PORT3;
- SERVER4 = NetworkSupport.getServerHostName(host)+PORT4;
+ SERVER1 = NetworkUtils.getServerHostName(host)+PORT1;
+ SERVER2 = NetworkUtils.getServerHostName(host)+PORT2;
+ SERVER3 = NetworkUtils.getServerHostName(host)+PORT3;
+ SERVER4 = NetworkUtils.getServerHostName(host)+PORT4;
//CacheServerTestUtil.disableShufflingOfEndpoints();
System.setProperty("gemfire.bridge.disableShufflingOfEndpoints", "false");
@@ -121,7 +121,7 @@ public class DurableClientReconnectDUnitTest extends DistributedTestCase
}
public void testDurableReconnectSingleServer() throws Exception
{
- createCacheClientAndConnectToSingleServer(NetworkSupport.getServerHostName(Host.getHost(0)), 0);
+ createCacheClientAndConnectToSingleServer(NetworkUtils.getServerHostName(Host.getHost(0)), 0);
List redundantServers = pool.getRedundantNames();
String primaryName = pool.getPrimaryName();
assertTrue(redundantServers.isEmpty());
@@ -131,7 +131,7 @@ public class DurableClientReconnectDUnitTest extends DistributedTestCase
//temporary fix for bug 38345.
Wait.pause(2000);
- createCacheClientAndConnectToSingleServer(NetworkSupport.getServerHostName(Host.getHost(0)), 0);
+ createCacheClientAndConnectToSingleServer(NetworkUtils.getServerHostName(Host.getHost(0)), 0);
List redundantServers2 = pool.getRedundantNames();
String primaryName2 = pool.getPrimaryName();
assertTrue(redundantServers2.isEmpty());
@@ -139,13 +139,13 @@ public class DurableClientReconnectDUnitTest extends DistributedTestCase
}
public void testDurableReconnectSingleServerWithZeroConnPerServer() throws Exception
{
- createCacheClientAndConnectToSingleServerWithZeroConnPerServer(NetworkSupport.getServerHostName(Host.getHost(0)), 0);
+ createCacheClientAndConnectToSingleServerWithZeroConnPerServer(NetworkUtils.getServerHostName(Host.getHost(0)), 0);
List redundantServers = pool.getRedundantNames();
String primaryName = pool.getPrimaryName();
assertTrue(redundantServers.isEmpty());
closeCache(true);
- createCacheClientAndConnectToSingleServerWithZeroConnPerServer(NetworkSupport.getServerHostName(Host.getHost(0)), 0);
+ createCacheClientAndConnectToSingleServerWithZeroConnPerServer(NetworkUtils.getServerHostName(Host.getHost(0)), 0);
List redundantServers2 = pool.getRedundantNames();
String primaryName2 = pool.getPrimaryName();
assertTrue(redundantServers2.isEmpty());
@@ -382,51 +382,51 @@ public class DurableClientReconnectDUnitTest extends DistributedTestCase
instance.determineAndVerfiyRedundantServers(redundantServers);
instance.determineAndVerfiyNonRedundantServers(redundantServers);
- LogWriterSupport.getLogWriter().info("TEST - Durable client initialially has servers " + redundantServers);
+ LogWriterUtils.getLogWriter().info("TEST - Durable client initialially has servers " + redundantServers);
- LogWriterSupport.getLogWriter().info("TEST - Closing durable client for the first time");
+ LogWriterUtils.getLogWriter().info("TEST - Closing durable client for the first time");
// Stop the durable client
closeCache(true);
- LogWriterSupport.getLogWriter().info("TEST - Durable client closed for the first time");
+ LogWriterUtils.getLogWriter().info("TEST - Durable client closed for the first time");
//Wait for server to cleanup client resources
//temporary fix for bug 38345.
Wait.pause(2000);
- LogWriterSupport.getLogWriter().info("TEST - Creating the durable client with one fewer servers");
+ LogWriterUtils.getLogWriter().info("TEST - Creating the durable client with one fewer servers");
//We recreate the durable client, but this
//Time we won't have it create any queues
createCacheClient(2, 20, false);
HashSet redundantServers2 = new HashSet(pool.getRedundantNames());
redundantServers2.add(pool.getPrimaryName());
- LogWriterSupport.getLogWriter().info("TEST - Durable client created again, now with servers " + redundantServers2);
+ LogWriterUtils.getLogWriter().info("TEST - Durable client created again, now with servers " + redundantServers2);
Host host = Host.getHost(0);
//Make sure we create client to server connections to all of the servers
- pool.acquireConnection(new ServerLocation(NetworkSupport.getServerHostName(host), PORT1.intValue()));
- pool.acquireConnection(new ServerLocation(NetworkSupport.getServerHostName(host), PORT2.intValue()));
- pool.acquireConnection(new ServerLocation(NetworkSupport.getServerHostName(host), PORT3.intValue()));
- pool.acquireConnection(new ServerLocation(NetworkSupport.getServerHostName(host), PORT4.intValue()));
+ pool.acquireConnection(new ServerLocation(NetworkUtils.getServerHostName(host), PORT1.intValue()));
+ pool.acquireConnection(new ServerLocation(NetworkUtils.getServerHostName(host), PORT2.intValue()));
+ pool.acquireConnection(new ServerLocation(NetworkUtils.getServerHostName(host), PORT3.intValue()));
+ pool.acquireConnection(new ServerLocation(NetworkUtils.getServerHostName(host), PORT4.intValue()));
- LogWriterSupport.getLogWriter().info("TEST - All pool connections are now aquired");
+ LogWriterUtils.getLogWriter().info("TEST - All pool connections are now aquired");
closeCache(true);
- LogWriterSupport.getLogWriter().info("TEST - closed durable client for the second time");
+ LogWriterUtils.getLogWriter().info("TEST - closed durable client for the second time");
//Wait for server to cleanup client resources
//temporary fix for bug 38345.
Wait.pause(2000);
- LogWriterSupport.getLogWriter().info("TEST - creating durable client for the third time");
+ LogWriterUtils.getLogWriter().info("TEST - creating durable client for the third time");
//Now we should connect to all of the servers we were originally connected to
createCacheClient(2, 20);
HashSet redundantServersAfterReconnect = new HashSet(pool.getRedundantNames());
redundantServersAfterReconnect.add(pool.getPrimaryName());
- LogWriterSupport.getLogWriter().info("TEST - durable client created for the third time, now with servers " + redundantServersAfterReconnect);
+ LogWriterUtils.getLogWriter().info("TEST - durable client created for the third time, now with servers " + redundantServersAfterReconnect);
instance.determineAndVerfiyRedundantServers(redundantServersAfterReconnect);
instance.determineAndVerfiyNonRedundantServers(redundantServersAfterReconnect);
@@ -436,7 +436,7 @@ public class DurableClientReconnectDUnitTest extends DistributedTestCase
//Now we wait to make sure the durable client expiration task isn't fired.
Wait.pause(25000);
- LogWriterSupport.getLogWriter().info("TEST - Finished waiting for durable client expiration task");
+ LogWriterUtils.getLogWriter().info("TEST - Finished waiting for durable client expiration task");
redundantServersAfterReconnect = new HashSet(pool.getRedundantNames());
redundantServersAfterReconnect.add(pool.getPrimaryName());
@@ -603,10 +603,10 @@ public class DurableClientReconnectDUnitTest extends DistributedTestCase
protected PoolFactory getPoolFactory() {
Host host = Host.getHost(0);
PoolFactory factory = PoolManager.createFactory()
- .addServer(NetworkSupport.getServerHostName(host), PORT1.intValue())
- .addServer(NetworkSupport.getServerHostName(host), PORT2.intValue())
- .addServer(NetworkSupport.getServerHostName(host), PORT3.intValue())
- .addServer(NetworkSupport.getServerHostName(host), PORT4.intValue());
+ .addServer(NetworkUtils.getServerHostName(host), PORT1.intValue())
+ .addServer(NetworkUtils.getServerHostName(host), PORT2.intValue())
+ .addServer(NetworkUtils.getServerHostName(host), PORT3.intValue())
+ .addServer(NetworkUtils.getServerHostName(host), PORT4.intValue());
return factory;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientStatsDUnitTest.java
index 9eb0dcd..d491513 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientStatsDUnitTest.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -181,7 +181,7 @@ public class DurableClientStatsDUnitTest extends DistributedTestCase {
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
regionName,
getDurableClientDistributedSystemProperties(durableClientId,
durableClientTimeout), Boolean.TRUE });
@@ -206,7 +206,7 @@ public class DurableClientStatsDUnitTest extends DistributedTestCase {
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
regionName,
getNonDurableClientDistributedSystemProperties(durableClientId,
durableClientTimeout), Boolean.TRUE });
@@ -231,7 +231,7 @@ public class DurableClientStatsDUnitTest extends DistributedTestCase {
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
regionName,
getDurableClientDistributedSystemProperties(durableClientId,
durableClientTimeout), Boolean.TRUE });
@@ -255,7 +255,7 @@ public class DurableClientStatsDUnitTest extends DistributedTestCase {
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, true, 0),
regionName,
getNonDurableClientDistributedSystemProperties(durableClientId,
durableClientTimeout), Boolean.TRUE });
[18/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRColocationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRColocationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRColocationDUnitTest.java
index 82e7d3c..5ce60f3 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRColocationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRColocationDUnitTest.java
@@ -64,7 +64,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -717,7 +717,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
assertNotNull(basicGetCache());
Region pr = basicGetCache().createRegion(partitionedRegionName, attr.create());
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + pr.toString());
}
@@ -754,7 +754,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
}
catch (Exception Expected) {
Expected.printStackTrace();
- LogWriterSupport.getLogWriter().info("Expected Message : " + Expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Expected Message : " + Expected.getMessage());
assertTrue(Expected.getMessage().startsWith(
"Colocated regions should have accessors at the same node"));
}
@@ -789,7 +789,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
assertNotNull(basicGetCache());
Region pr = basicGetCache().createRegion(partitionedRegionName, attr.create());
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + pr.toString());
}
@@ -825,7 +825,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
+ "should have accessors at the same node");
}
catch (Exception Expected) {
- LogWriterSupport.getLogWriter().info("Expected Message : " + Expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Expected Message : " + Expected.getMessage());
assertTrue(Expected.getMessage().startsWith(
"Colocated regions should have accessors at the same node"));
}
@@ -860,7 +860,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
assertNotNull(basicGetCache());
Region pr = basicGetCache().createRegion(partitionedRegionName, attr.create());
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + pr.toString());
}
@@ -888,7 +888,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
assertNotNull(basicGetCache());
Region pr = basicGetCache().createRegion(partitionedRegionName, attr.create());
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + pr.toString());
}
@@ -927,7 +927,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
+ "as colocated regions are not configured to be at the same nodes.");
}
catch (Exception Expected) {
- LogWriterSupport.getLogWriter().info("Expected Message : " + Expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Expected Message : " + Expected.getMessage());
assertTrue(Expected.getMessage().contains("Cannot create buckets, as "
+ "colocated regions are not configured to be at the same nodes."));
}
@@ -963,7 +963,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
}
catch (Exception NotExpected) {
NotExpected.printStackTrace();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Unexpected Exception Message : " + NotExpected.getMessage());
Assert.fail("Unpexpected Exception" , NotExpected);
}
@@ -1018,7 +1018,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
+ expectedExMessage);
}
catch (Exception Expected) {
- LogWriterSupport.getLogWriter().info("Expected Messageee : " + Expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Expected Messageee : " + Expected.getMessage());
assertTrue(Expected.getMessage().contains(expectedExMessage));
}
}
@@ -1036,7 +1036,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
+ expectedExMessage);
}
catch (Exception Expected) {
- LogWriterSupport.getLogWriter().info("Expected Messageee : " + Expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Expected Messageee : " + Expected.getMessage());
assertTrue(Expected.getMessage().contains(expectedExMessage));
}
}
@@ -1096,7 +1096,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
+ expectedExMessage);
}
catch (IllegalStateException expected) {
- LogWriterSupport.getLogWriter().info("Got message: " + expected.getMessage());
+ LogWriterUtils.getLogWriter().info("Got message: " + expected.getMessage());
assertTrue(expected.getMessage().contains(expectedExMessage));
}
}
@@ -1114,7 +1114,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
}
catch (Exception unexpected) {
unexpected.printStackTrace();
- LogWriterSupport.getLogWriter().info("Unexpected Message: " + unexpected.getMessage());
+ LogWriterUtils.getLogWriter().info("Unexpected Message: " + unexpected.getMessage());
fail("Could not destroy the child region.");
}
}
@@ -1131,7 +1131,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
}
catch (Exception unexpected) {
unexpected.printStackTrace();
- LogWriterSupport.getLogWriter().info("Unexpected Message: " + unexpected.getMessage());
+ LogWriterUtils.getLogWriter().info("Unexpected Message: " + unexpected.getMessage());
fail("Could not destroy the parent region.");
}
}
@@ -1226,7 +1226,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(1);
prForCustomer.put(dummy, new Integer(100));
assertEquals(prForCustomer.get(dummy), new Integer(100));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Key :" + dummy.dummyID + " Value :"
+ prForCustomer.get(dummy));
@@ -1235,14 +1235,14 @@ public class PRColocationDUnitTest extends CacheTestCase {
assertNotNull(prForOrder);
prForOrder.put(dummy, new Integer(200));
assertEquals(prForOrder.get(dummy), new Integer(200));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Key :" + dummy.dummyID + " Value :" + prForOrder.get(dummy));
return null;
}
});
} catch (Exception unexpected) {
unexpected.printStackTrace();
- LogWriterSupport.getLogWriter().info("Unexpected Message: " + unexpected.getMessage());
+ LogWriterUtils.getLogWriter().info("Unexpected Message: " + unexpected.getMessage());
fail("Test failed");
}
}
@@ -2072,13 +2072,13 @@ public class PRColocationDUnitTest extends CacheTestCase {
}
Iterator primaryBucketIterator = primaryBucketListForCustomer.iterator();
while (primaryBucketIterator.hasNext()) {
- LogWriterSupport.getLogWriter().info("Primary Bucket : " + primaryBucketIterator.next());
+ LogWriterUtils.getLogWriter().info("Primary Bucket : " + primaryBucketIterator.next());
}
Iterator SecondaryBucketIterator = secondaryBucketListForCustomer
.iterator();
while (SecondaryBucketIterator.hasNext()) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Secondary Bucket : " + SecondaryBucketIterator.next());
}
}
@@ -2119,12 +2119,12 @@ public class PRColocationDUnitTest extends CacheTestCase {
}
Iterator primaryBucketIterator = primaryBucketListForOrder.iterator();
while (primaryBucketIterator.hasNext()) {
- LogWriterSupport.getLogWriter().info("Primary Bucket : " + primaryBucketIterator.next());
+ LogWriterUtils.getLogWriter().info("Primary Bucket : " + primaryBucketIterator.next());
}
Iterator SecondaryBucketIterator = secondaryBucketListForOrder.iterator();
while (SecondaryBucketIterator.hasNext()) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Secondary Bucket : " + SecondaryBucketIterator.next());
}
}
@@ -2165,13 +2165,13 @@ public class PRColocationDUnitTest extends CacheTestCase {
}
Iterator primaryBucketIterator = primaryBucketListForShipment.iterator();
while (primaryBucketIterator.hasNext()) {
- LogWriterSupport.getLogWriter().info("Primary Bucket : " + primaryBucketIterator.next());
+ LogWriterUtils.getLogWriter().info("Primary Bucket : " + primaryBucketIterator.next());
}
Iterator SecondaryBucketIterator = secondaryBucketListForShipment
.iterator();
while (SecondaryBucketIterator.hasNext()) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Secondary Bucket : " + SecondaryBucketIterator.next());
}
}
@@ -2211,10 +2211,10 @@ public class PRColocationDUnitTest extends CacheTestCase {
HashMap localBucket2RegionMap = (HashMap)customerPartitionedregion
.getDataStore().getSizeLocally();
int customerBucketSize = localBucket2RegionMap.size();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + customerPartitionedRegionName + " in this VM :- "
+ localBucket2RegionMap.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of primary buckets the " + customerPartitionedRegionName + " in this VM :- "
+ customerPartitionedregion.getDataStore().getNumberOfPrimaryBucketsManaged());
Set customerEntrySet = localBucket2RegionMap.entrySet();
@@ -2224,7 +2224,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
Map.Entry me = (Map.Entry)customerIterator.next();
Integer size = (Integer)me.getValue();
assertEquals(1, size.intValue());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the Bucket " + me.getKey() + ": - " + size.toString());
}
@@ -2232,10 +2232,10 @@ public class PRColocationDUnitTest extends CacheTestCase {
localBucket2RegionMap = (HashMap)orderPartitionedregion.getDataStore()
.getSizeLocally();
int orderBucketSize = localBucket2RegionMap.size();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + orderPartitionedRegionName + " in this VM :- "
+ localBucket2RegionMap.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of primary buckets the " + orderPartitionedRegionName + " in this VM :- "
+ orderPartitionedregion.getDataStore().getNumberOfPrimaryBucketsManaged());
@@ -2246,16 +2246,16 @@ public class PRColocationDUnitTest extends CacheTestCase {
Map.Entry me = (Map.Entry)orderIterator.next();
Integer size = (Integer)me.getValue();
assertEquals(10, size.intValue());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the Bucket " + me.getKey() + ": - " + size.toString());
}
localBucket2RegionMap = (HashMap)shipmentPartitionedregion.getDataStore()
.getSizeLocally();
int shipmentBucketSize = localBucket2RegionMap.size();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + shipmentPartitionedRegionName + " in this VM :- "
+ localBucket2RegionMap.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of primary buckets the " + shipmentPartitionedRegionName + " in this VM :- "
+ shipmentPartitionedregion.getDataStore().getNumberOfPrimaryBucketsManaged());
Set shipmentEntrySet = localBucket2RegionMap.entrySet();
@@ -2265,7 +2265,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
Map.Entry me = (Map.Entry)shipmentIterator.next();
Integer size = (Integer)me.getValue();
assertEquals(100, size.intValue());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the Bucket " + me.getKey() + ": - " + size.toString());
}
@@ -2363,7 +2363,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
// assertNotNull(orderPartitionedregion.get(orderId));
if (custId.equals(orderId.getCustId())) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
orderId + "belongs to node " + idmForCustomer + " "
+ idmForOrder);
assertEquals(idmForCustomer, idmForOrder);
@@ -2375,7 +2375,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
ShipmentId shipmentId = (ShipmentId)shipmentIterator.next();
// assertNotNull(shipmentPartitionedregion.get(shipmentId));
if (orderId.equals(shipmentId.getOrderId())) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
shipmentId + "belongs to node " + idmForOrder + " "
+ idmForShipment);
}
@@ -2460,7 +2460,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
+ LogWriterUtils.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
}
}
@@ -2486,7 +2486,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
}
}
@@ -2513,7 +2513,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
}
}
@@ -2542,7 +2542,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
"putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Shipment :- { " + shipmentId + " : " + shipment + " }");
}
}
@@ -2576,7 +2576,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
Region pr = basicGetCache().getRegion(partitionedRegionName);
assertNotNull(pr);
try {
- LogWriterSupport.getLogWriter().info("Destroying Partitioned Region " + partitionedRegionName);
+ LogWriterUtils.getLogWriter().info("Destroying Partitioned Region " + partitionedRegionName);
pr.destroyRegion();
fail("Did not get the expected ISE");
} catch (Exception e) {
@@ -2607,7 +2607,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
assertNotNull(basicGetCache());
Region pr = basicGetCache().createRegion(partitionedRegionName, attr.create());
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + pr.toString());
}
@@ -2628,7 +2628,7 @@ public class PRColocationDUnitTest extends CacheTestCase {
attr.setPartitionAttributes(prAttr);
Region pr = root.createSubregion(partitionedRegionName, attr.create());
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned sub region " + pr.getName()
+ " created Successfully :" + pr.toString());
if(localMaxMemory == 0){
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRCustomPartitioningDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRCustomPartitioningDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRCustomPartitioningDUnitTest.java
index 45eae3c..981db10 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRCustomPartitioningDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRCustomPartitioningDUnitTest.java
@@ -53,7 +53,7 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDataStore.BucketVisi
import com.gemstone.gemfire.internal.cache.xmlcache.Declarable2;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -155,12 +155,12 @@ public class PRCustomPartitioningDUnitTest extends
for (int b = 0; b < numBucks; b++) {
if (par.getBucketKeys(b).contains(key)) {
foundIt = true;
- LogWriterSupport.getLogWriter().info("Key " + key + " found in bucket " + b);
+ LogWriterUtils.getLogWriter().info("Key " + key + " found in bucket " + b);
break;
}
}
if (!foundIt) {
- LogWriterSupport.getLogWriter().severe("Key " + key + " not found in any bucket");
+ LogWriterUtils.getLogWriter().severe("Key " + key + " not found in any bucket");
}
return foundIt;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionDUnitTest.java
index e35e4fe..0c4b7a9 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionDUnitTest.java
@@ -70,10 +70,10 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -995,7 +995,7 @@ public class PRFunctionExecutionDUnitTest extends
});
assertEquals(Boolean.TRUE, o);
- Threads.join(async[0], 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
@@ -1093,7 +1093,7 @@ public class PRFunctionExecutionDUnitTest extends
});
assertEquals(Boolean.TRUE, o);
- Threads.join(async[0], 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[0], 60 * 1000);
if (async[0].getException() != null) {
Assert.fail("UnExpected Exception Occured : ", async[0].getException());
@@ -1590,7 +1590,7 @@ public class PRFunctionExecutionDUnitTest extends
}
catch (Exception expected) {
// No data should cause exec to throw
- LogWriterSupport.getLogWriter().warning("Exception Occured : "+ expected.getMessage());
+ LogWriterUtils.getLogWriter().warning("Exception Occured : "+ expected.getMessage());
// boolean expectedStr = expected.getMessage().startsWith("No target
// node was found for routingKey");
// assertTrue("Unexpected exception: " + expected, expectedStr);
@@ -2247,7 +2247,7 @@ public class PRFunctionExecutionDUnitTest extends
ResultCollector rc1 = dataSet.withArgs(Boolean.TRUE)
.execute(function.getId());
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRFunctionExecutionDUnitTest#testExecutionOnAllNodes_byName : Result size :"
+ l.size() + " Result : " + l);
assertEquals(4, l.size());
@@ -2432,7 +2432,7 @@ public class PRFunctionExecutionDUnitTest extends
}
});
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRFunctionExecutionDUnitTest#testExecutionOnAllNodes_byName : Result size :"
+ l.size() + " Result : " + l);
assertEquals(4, l.size());
@@ -2537,7 +2537,7 @@ public class PRFunctionExecutionDUnitTest extends
}
});
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRFunctionExecutionDUnitTest#testExecutionOnAllNodes_byName : Result size :"
+ l.size() + " Result : " + l);
assertEquals(4, l.size());
@@ -2612,7 +2612,7 @@ public class PRFunctionExecutionDUnitTest extends
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().fine("Customer :- { " + custid + " : " + customer + " }");
+ LogWriterUtils.getLogWriter().fine("Customer :- { " + custid + " : " + customer + " }");
}
Function function = new TestFunction(true,TestFunction.TEST_FUNCTION3);
@@ -2723,7 +2723,7 @@ public class PRFunctionExecutionDUnitTest extends
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().fine("Customer :- { " + custid + " : " + customer + " }");
+ LogWriterUtils.getLogWriter().fine("Customer :- { " + custid + " : " + customer + " }");
}
PartitionedRegion partitionedregion = (PartitionedRegion)getCache().getRegion(rName2);
@@ -2745,7 +2745,7 @@ public class PRFunctionExecutionDUnitTest extends
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().fine("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().fine("Order :- { " + orderId + " : " + order + " }");
}
}
@@ -3054,7 +3054,7 @@ public class PRFunctionExecutionDUnitTest extends
ds.disconnect();
}
catch (Exception e) {
- LogWriterSupport.getLogWriter().info("Exception Occured : " + e.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured : " + e.getMessage());
e.printStackTrace();
Assert.fail("Test failed", e);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionWithResultSenderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionWithResultSenderDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionWithResultSenderDUnitTest.java
index 6848345..458ac5d 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionWithResultSenderDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRFunctionExecutionWithResultSenderDUnitTest.java
@@ -48,7 +48,7 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.internal.cache.PartitionedRegionTestHelper;
import com.gemstone.gemfire.internal.cache.functions.TestFunction;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -585,7 +585,7 @@ public class PRFunctionExecutionWithResultSenderDUnitTest extends
ResultCollector rc1 = dataSet.withArgs(Boolean.TRUE).execute(
function.getId());
List l = ((List)rc1.getResult());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PRFunctionExecutionDUnitTest#testExecutionOnAllNodes_byName : Result size :"
+ l.size() + " Result : " + l);
assertEquals(4, l.size());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRPerformanceTestDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRPerformanceTestDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRPerformanceTestDUnitTest.java
index 53b6257..cd35f50 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRPerformanceTestDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRPerformanceTestDUnitTest.java
@@ -54,7 +54,7 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -159,7 +159,7 @@ public class PRPerformanceTestDUnitTest extends
}
}
if (!foundIt) {
- LogWriterSupport.getLogWriter().severe("Key " + key + " not found in any bucket");
+ LogWriterUtils.getLogWriter().severe("Key " + key + " not found in any bucket");
}
return foundIt;
}
@@ -253,7 +253,7 @@ public class PRPerformanceTestDUnitTest extends
list = (ArrayList)rc.getResult();
}
catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Exception Occured :" + ex.getMessage());
+ LogWriterUtils.getLogWriter().info("Exception Occured :" + ex.getMessage());
Assert.fail("Test failed",ex);
}
Object val = list.get(0);
@@ -279,7 +279,7 @@ public class PRPerformanceTestDUnitTest extends
}
t.stop();
- LogWriterSupport.getLogWriter().info("Time taken to iterate over " + vals.size()+ " no. of keys: " + t.getTimeInMs() + " ms");
+ LogWriterUtils.getLogWriter().info("Time taken to iterate over " + vals.size()+ " no. of keys: " + t.getTimeInMs() + " ms");
// Call the execute method for each key and see if this takes more time
@@ -306,7 +306,7 @@ public class PRPerformanceTestDUnitTest extends
}
t.stop();
assertEquals(vals.size(),listOfKeys.size());
- LogWriterSupport.getLogWriter().info("Time taken to iterate over " + vals.size()+ " no. of keys using FunctionExecution: " + t.getTimeInMs() + " ms");
+ LogWriterUtils.getLogWriter().info("Time taken to iterate over " + vals.size()+ " no. of keys using FunctionExecution: " + t.getTimeInMs() + " ms");
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRTransactionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRTransactionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRTransactionDUnitTest.java
index d8c3f3a..b253dc0 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRTransactionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/execute/PRTransactionDUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.internal.cache.execute.data.Shipment;
import com.gemstone.gemfire.internal.cache.execute.data.ShipmentId;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
/**
@@ -155,7 +155,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
filter.clear();
args.clear();
args.add(new Integer(VERIFY_NON_COLOCATION));
- LogWriterSupport.getLogWriter().info("VERIFY_NON_COLOCATION");
+ LogWriterUtils.getLogWriter().info("VERIFY_NON_COLOCATION");
args.add(custId);
args.add(newCus);
args.add(orderId);
@@ -167,7 +167,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
fail("Expected exception was not thrown");
}
catch (FunctionException fe) {
- LogWriterSupport.getLogWriter().info("Caught Expected exception");
+ LogWriterUtils.getLogWriter().info("Caught Expected exception");
if(fe.getCause() instanceof TransactionDataNotColocatedException) {
}
else {
@@ -178,7 +178,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
}
// verify that the transaction modifications are applied
args.set(0, new Integer(VERIFY_TX));
- LogWriterSupport.getLogWriter().info("VERIFY_TX");
+ LogWriterUtils.getLogWriter().info("VERIFY_TX");
orderpr.put(orderId, order);
assertNotNull(orderpr.get(orderId));
e.withFilter(filter).withArgs(args).execute(txFunction.getId())
@@ -194,17 +194,17 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
.getResult();
// verify that the transaction is rolled back
args.set(0, new Integer(VERIFY_ROLLBACK));
- LogWriterSupport.getLogWriter().info("VERIFY_ROLLBACK");
+ LogWriterUtils.getLogWriter().info("VERIFY_ROLLBACK");
e.withFilter(filter).withArgs(args).execute(txFunction.getId())
.getResult();
// verify destroy
args.set(0, new Integer(VERIFY_DESTROY));
- LogWriterSupport.getLogWriter().info("VERIFY_DESTROY");
+ LogWriterUtils.getLogWriter().info("VERIFY_DESTROY");
e.withFilter(filter).withArgs(args).execute(txFunction.getId())
.getResult();
// verify invalidate
args.set(0, new Integer(VERIFY_INVALIDATE));
- LogWriterSupport.getLogWriter().info("VERIFY_INVALIDATE");
+ LogWriterUtils.getLogWriter().info("VERIFY_INVALIDATE");
e.withFilter(filter).withArgs(args).execute(txFunction.getId())
.getResult();
return Boolean.TRUE;
@@ -403,7 +403,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
orderPartitionedregion.getDataStore().dumpEntries(false);
Iterator custIterator = customerPartitionedregion.getDataStore()
.getEntries().iterator();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Found " + customerPartitionedregion.getDataStore().getEntries().size()
+ " Customer entries in the partition");
Region.Entry custEntry = null;
@@ -413,7 +413,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
Customer cust = (Customer)custEntry.getValue();
Iterator orderIterator = orderPartitionedregion.getDataStore()
.getEntries().iterator();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Found " + orderPartitionedregion.getDataStore().getEntries().size()
+ " Order entries in the partition");
int orderPerCustomer = 0;
@@ -468,7 +468,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
filter.clear();
args.clear();
args.add(new Integer(VERIFY_LISTENER_CALLBACK));
- LogWriterSupport.getLogWriter().info("VERIFY_LISTENER_CALLBACK");
+ LogWriterUtils.getLogWriter().info("VERIFY_LISTENER_CALLBACK");
args.add(custId);
args.add(newCus);
args.add(orderId);
@@ -516,7 +516,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
filter.clear();
args.clear();
args.add(new Integer(VERIFY_REP_READ));
- LogWriterSupport.getLogWriter().info("VERIFY_REP_READ");
+ LogWriterUtils.getLogWriter().info("VERIFY_REP_READ");
args.add(custId);
args.add(newCus);
args.add(orderId);
@@ -561,7 +561,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
Execution e = FunctionService.onRegion(customerPR);
// for each customer, update order and shipment
for (int iterations = 1; iterations <= totalIterations; iterations++) {
- LogWriterSupport.getLogWriter().info("running perfFunction");
+ LogWriterUtils.getLogWriter().info("running perfFunction");
long startTime = 0;
ArrayList args = new ArrayList();
CustId custId = new CustId(iterations % 10);
@@ -602,7 +602,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
Execution e = FunctionService.onRegion(customerPR);
// for each customer, update order and shipment
for (int iterations = 1; iterations <= totalIterations; iterations++) {
- LogWriterSupport.getLogWriter().info("Running perfFunction");
+ LogWriterUtils.getLogWriter().info("Running perfFunction");
long startTime = 0;
ArrayList args = new ArrayList();
CustId custId = new CustId(iterations % 10);
@@ -638,7 +638,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
double diff = (perfTime.longValue() - perfTxTime.longValue()) * 1.0;
double percentDiff = (diff / perfTime.longValue()) * 100;
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
(totalIterations - warmupIterations) + " iterations of function took:"
+ +perfTime.longValue() + " Nanos, and transaction function took:"
+ perfTxTime.longValue() + " Nanos, difference :" + diff
@@ -719,7 +719,7 @@ public class PRTransactionDUnitTest extends PRColocationDUnitTest {
}
}
mgr.commit();
- LogWriterSupport.getLogWriter().info("COMMIT completed");
+ LogWriterUtils.getLogWriter().info("COMMIT completed");
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARQAddOperationJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARQAddOperationJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARQAddOperationJUnitTest.java
index 6e3efcf..f299391 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARQAddOperationJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARQAddOperationJUnitTest.java
@@ -31,7 +31,7 @@ import org.junit.experimental.categories.Category;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.internal.cache.Conflatable;
import com.gemstone.gemfire.internal.cache.EventID;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@@ -153,7 +153,7 @@ public class BlockingHARQAddOperationJUnitTest extends
};
takeThread.start();
- Wait.staticPause(20 * 1000);
+ Wait.pause(20 * 1000);
if (!takeThread.isAlive()) {
fail("take() thread died ");
}
@@ -161,7 +161,7 @@ public class BlockingHARQAddOperationJUnitTest extends
ConflatableObject c1 = new ConflatableObject(KEY1, VALUE1, id1,
conflationEnabled, "region1");
rq.put(c1);
- Threads.join(takeThread, 20 * 1000, null);
+ ThreadUtils.join(takeThread, 20 * 1000);
assertEquals(1, takenObjects.size());
Conflatable obj = (Conflatable)takenObjects.get(0);
assertNotNull(obj);
@@ -221,7 +221,7 @@ public class BlockingHARQAddOperationJUnitTest extends
rq.put(c);
}
for (int i = 0; i < totalTakeThreads; i++) {
- Threads.join(takeThreads[i], 20 * 1000, null);
+ ThreadUtils.join(takeThreads[i], 20 * 1000);
}
assertEquals(totalTakeThreads, takenObjects.size());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
index db5874c..9e79239 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/BlockingHARegionJUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.internal.AvailablePort;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@@ -77,8 +77,8 @@ public class BlockingHARegionJUnitTest
thread1.start();
thread2.start();
- Threads.join(thread1, 30 * 1000, null);
- Threads.join(thread2, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000);
+ ThreadUtils.join(thread2, 30 * 1000);
if (exceptionOccured) {
fail(" Test failed due to " + exceptionString);
@@ -152,8 +152,8 @@ public class BlockingHARegionJUnitTest
};
Wait.waitForCriterion(ev, 30 * 1000, 1000, true);
- Threads.join(thread1, 30 * 1000, null); // for completeness
- Threads.join(thread2, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000); // for completeness
+ ThreadUtils.join(thread2, 30 * 1000);
if (exceptionOccured) {
fail(" Test failed due to " + exceptionString);
}
@@ -226,11 +226,11 @@ public class BlockingHARegionJUnitTest
Thread.sleep(2000);
- Threads.join(thread1, 5 * 60 * 1000, null);
- Threads.join(thread2, 5 * 60 * 1000, null);
- Threads.join(thread3, 5 * 60 * 1000, null);
- Threads.join(thread4, 5 * 60 * 1000, null);
- Threads.join(thread5, 5 * 60 * 1000, null);
+ ThreadUtils.join(thread1, 5 * 60 * 1000);
+ ThreadUtils.join(thread2, 5 * 60 * 1000);
+ ThreadUtils.join(thread3, 5 * 60 * 1000);
+ ThreadUtils.join(thread4, 5 * 60 * 1000);
+ ThreadUtils.join(thread5, 5 * 60 * 1000);
cache.close();
}
@@ -283,11 +283,11 @@ public class BlockingHARegionJUnitTest
thread9.start();
thread10.start();
- Threads.join(thread6, 30 * 1000, null);
- Threads.join(thread7, 30 * 1000, null);
- Threads.join(thread8, 30 * 1000, null);
- Threads.join(thread9, 30 * 1000, null);
- Threads.join(thread10, 30 * 1000, null);
+ ThreadUtils.join(thread6, 30 * 1000);
+ ThreadUtils.join(thread7, 30 * 1000);
+ ThreadUtils.join(thread8, 30 * 1000);
+ ThreadUtils.join(thread9, 30 * 1000);
+ ThreadUtils.join(thread10, 30 * 1000);
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
@@ -320,11 +320,11 @@ public class BlockingHARegionJUnitTest
Thread.sleep(2000);
- Threads.join(thread1, 30 * 1000, null);
- Threads.join(thread2, 30 * 1000, null);
- Threads.join(thread3, 30 * 1000, null);
- Threads.join(thread4, 30 * 1000, null);
- Threads.join(thread5, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000);
+ ThreadUtils.join(thread2, 30 * 1000);
+ ThreadUtils.join(thread3, 30 * 1000);
+ ThreadUtils.join(thread4, 30 * 1000);
+ ThreadUtils.join(thread5, 30 * 1000);
cache.close();
}
@@ -378,7 +378,7 @@ public class BlockingHARegionJUnitTest
}
};
t1.start();
- Threads.join(t1, 20 * 1000, null);
+ ThreadUtils.join(t1, 20 * 1000);
if (exceptionOccured) {
fail(" Test failed due to " + exceptionString);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
index 757b02a..09e0fbf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug36853EventsExpiryDUnitTest.java
@@ -37,8 +37,8 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -114,7 +114,7 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
"createServerCache")).intValue();
client.invoke(Bug36853EventsExpiryDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(host), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(host), new Integer(PORT2) });
}
@@ -180,11 +180,11 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
public void afterCreate(EntryEvent event)
{
String key = (String)event.getKey();
- LogWriterSupport.getLogWriter().info("client2 : afterCreate : key =" + key);
+ LogWriterUtils.getLogWriter().info("client2 : afterCreate : key =" + key);
if (key.equals(LAST_KEY)) {
synchronized (Bug36853EventsExpiryDUnitTest.class) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Notifying client2 to proceed for validation");
proceedForValidation = true;
Bug36853EventsExpiryDUnitTest.class.notify();
@@ -259,7 +259,7 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
synchronized (Bug36853EventsExpiryDUnitTest.class) {
if (!proceedForValidation)
try {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Client2 going in wait before starting validation");
Bug36853EventsExpiryDUnitTest.class.wait(5000);
}
@@ -268,13 +268,13 @@ public class Bug36853EventsExpiryDUnitTest extends CacheTestCase
}
}
}
- LogWriterSupport.getLogWriter().info("Starting validation on client2");
+ LogWriterUtils.getLogWriter().info("Starting validation on client2");
Assert.assertEquals(
"Puts recieved by client not equal to the puts done at server.",
TOTAL_PUTS, putsRecievedByClient);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("putsRecievedByClient = " + putsRecievedByClient);
- LogWriterSupport.getLogWriter().info("Validation complete on client2");
+ LogWriterUtils.getLogWriter().info("Validation complete on client2");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
index 7e72f7f..ccea0f8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/Bug48571DUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientNotifier;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.VM;
@@ -109,7 +109,7 @@ public class Bug48571DUnitTest extends DistributedTestCase {
public static int createServerCache() throws Exception {
Properties props = new Properties();
- props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
props.setProperty("log-file", "server_" + OSProcess.getId() + ".log");
props.setProperty("log-level", "info");
props.setProperty("statistic-archive-file", "server_" + OSProcess.getId()
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
index 49faa64..77459b3 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/EventIdOptimizationDUnitTest.java
@@ -43,8 +43,8 @@ import com.gemstone.gemfire.internal.cache.EntryEventImpl;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.cache.client.internal.ServerRegionProxy;
import com.gemstone.gemfire.cache.client.internal.Connection;
@@ -175,9 +175,9 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
"createServerCache")).intValue();
client1.invoke(EventIdOptimizationDUnitTest.class, "createClientCache1",
- new Object[] { NetworkSupport.getServerHostName(host), new Integer(PORT1) });
+ new Object[] { NetworkUtils.getServerHostName(host), new Integer(PORT1) });
client2.invoke(EventIdOptimizationDUnitTest.class, "createClientCache2",
- new Object[] { NetworkSupport.getServerHostName(host), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(host), new Integer(PORT2) });
}
@@ -454,7 +454,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
synchronized (EventIdOptimizationDUnitTest.class) {
if (!proceedForValidation)
try {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Client2 going in wait before starting validation");
EventIdOptimizationDUnitTest.class.wait();
}
@@ -463,12 +463,12 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
}
}
}
- LogWriterSupport.getLogWriter().info("Starting validation on client2");
+ LogWriterUtils.getLogWriter().info("Starting validation on client2");
if (validationFailed) {
fail("\n The following eventIds recieved by client2 were not present in the eventId array sent by client1 \n"
+ failureMsg);
}
- LogWriterSupport.getLogWriter().info("Validation complete on client2, goin to unregister listeners");
+ LogWriterUtils.getLogWriter().info("Validation complete on client2, goin to unregister listeners");
Region region = cache.getRegion(Region.SEPARATOR + REGION_NAME);
if (region != null && !region.isDestroyed()) {
@@ -492,7 +492,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
}
}
- LogWriterSupport.getLogWriter().info("Test completed, Unregistered the listeners");
+ LogWriterUtils.getLogWriter().info("Test completed, Unregistered the listeners");
}
/**
@@ -572,7 +572,7 @@ public class EventIdOptimizationDUnitTest extends DistributedTestCase
&& (eventIdAtClient2.getSequenceID() == eventIdForLastKey
.getSequenceID())) {
synchronized (EventIdOptimizationDUnitTest.class) {
- LogWriterSupport.getLogWriter().info("Notifying client2 to proceed for validation");
+ LogWriterUtils.getLogWriter().info("Notifying client2 to proceed for validation");
proceedForValidation = true;
EventIdOptimizationDUnitTest.class.notify();
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
index d08f3d5..8d365c3 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/FailoverDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -90,7 +90,7 @@ public class FailoverDUnitTest extends DistributedTestCase
PORT2 = ((Integer)vm1.invoke(FailoverDUnitTest.class, "createServerCache" )).intValue();
CacheServerTestUtil.disableShufflingOfEndpoints();
- createClientCache(NetworkSupport.getServerHostName(host), new Integer(PORT1),new Integer(PORT2));
+ createClientCache(NetworkUtils.getServerHostName(host), new Integer(PORT1),new Integer(PORT2));
{ // calculate the primary vm
waitForPrimaryAndBackups(1);
PoolImpl pool = (PoolImpl)PoolManager.find("FailoverPool");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
index e9c8c86..7fbbdcf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HABugInPutDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -97,9 +97,9 @@ public class HABugInPutDUnitTest extends DistributedTestCase
.intValue();
client1.invoke(HABugInPutDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(host), new Integer(PORT1), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(host), new Integer(PORT1), new Integer(PORT2) });
client2.invoke(HABugInPutDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(host), new Integer(PORT1), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(host), new Integer(PORT1), new Integer(PORT2) });
//Boolean.getBoolean("")
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
index 0806bbf..3eeed6b 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAClearDUnitTest.java
@@ -39,8 +39,8 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.cache.Region;
@@ -529,7 +529,7 @@ public class HAClearDUnitTest extends DistributedTestCase
{
Region region = cache.getRegion(Region.SEPARATOR + REGION_NAME);
assertNotNull(region);
- LogWriterSupport.getLogWriter().info("Size of the region " + region.size());
+ LogWriterUtils.getLogWriter().info("Size of the region " + region.size());
assertEquals(size, region.size());
}
};
@@ -544,7 +544,7 @@ public class HAClearDUnitTest extends DistributedTestCase
public void run2() throws CacheException
{
Region region = cache.getRegion(Region.SEPARATOR + REGION_NAME);
- LogWriterSupport.getLogWriter().warning("Found region " + region);
+ LogWriterUtils.getLogWriter().warning("Found region " + region);
assertNull(region);
}
};
@@ -561,14 +561,14 @@ public class HAClearDUnitTest extends DistributedTestCase
PORT2 = ((Integer)server2.invoke(HAClearDUnitTest.class,
"createServerCache")).intValue();
client1.invoke(HAClearDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)),
+ NetworkUtils.getServerHostName(Host.getHost(0)),
new Integer(PORT1), new Integer(PORT2), new Boolean(true),
new Boolean(true) });
client2.invoke(HAClearDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)),
+ NetworkUtils.getServerHostName(Host.getHost(0)),
new Integer(PORT1), new Integer(PORT2), new Boolean(true),
new Boolean(true) });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)),
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)),
new Integer(PORT1), new Integer(PORT2),
new Boolean(true), new Boolean(true));
}
@@ -618,7 +618,7 @@ public class HAClearDUnitTest extends DistributedTestCase
factory.setCacheListener(new CacheListenerAdapter() {
public void afterRegionClear(RegionEvent event)
{
- LogWriterSupport.getLogWriter().info("-------> afterRegionClear received");
+ LogWriterUtils.getLogWriter().info("-------> afterRegionClear received");
synchronized (HAClearDUnitTest.class) {
gotClearCallback = true;
HAClearDUnitTest.class.notifyAll();
@@ -628,7 +628,7 @@ public class HAClearDUnitTest extends DistributedTestCase
public void afterRegionDestroy(RegionEvent event)
{
synchronized (HAClearDUnitTest.class) {
- LogWriterSupport.getLogWriter().info("-------> afterRegionDestroy received");
+ LogWriterUtils.getLogWriter().info("-------> afterRegionDestroy received");
gotDestroyRegionCallback = true;
HAClearDUnitTest.class.notifyAll();
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
index 39acb7c..241ac39 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAConflationDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -114,7 +114,7 @@ public class HAConflationDUnitTest extends CacheTestCase
server1.invoke(ConflationDUnitTest.class, "setIsSlowStart");
server1.invoke(HAConflationDUnitTest.class, "makeDispatcherSlow");
client1.invoke(HAConflationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(host), new Integer(PORT1), new Boolean(true) });
+ new Object[] { NetworkUtils.getServerHostName(host), new Integer(PORT1), new Boolean(true) });
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
index 3bd7076..22310cf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADuplicateDUnitTest.java
@@ -41,7 +41,7 @@ import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -227,7 +227,7 @@ public class HADuplicateDUnitTest extends DistributedTestCase
PORT2 = ((Integer)server2.invoke(HADuplicateDUnitTest.class,
"createServerCache")).intValue();
client1.invoke(HADuplicateDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2) });
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
index 2059d01..b77af55 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAEventIdPropagationDUnitTest.java
@@ -49,8 +49,8 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -128,7 +128,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
int PORT1 = ((Integer)server1.invoke(HAEventIdPropagationDUnitTest.class,
"createServerCache")).intValue();
client1.invoke(HAEventIdPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
}
/** create the server * */
@@ -219,7 +219,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
};
Wait.waitForCriterion(ev, 10 * 1000, 200, true);
synchronized(map) {
- LogWriterSupport.getLogWriter().info("assertThreadIdToSequenceIdMapisNotNullButEmpty: map size is " + map.size());
+ LogWriterUtils.getLogWriter().info("assertThreadIdToSequenceIdMapisNotNullButEmpty: map size is " + map.size());
assertTrue(map.size() == 1);
}
@@ -688,7 +688,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
public void afterCreate(EntryEvent event)
{
- LogWriterSupport.getLogWriter().fine(" entered after created with " + event.getKey());
+ LogWriterUtils.getLogWriter().fine(" entered after created with " + event.getKey());
boolean shouldNotify = false;
Object key = event.getKey();
if (key.equals(PUTALL_KEY1)) {
@@ -781,7 +781,7 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
public void afterCreate(EntryEvent event)
{
- LogWriterSupport.getLogWriter().fine(" entered after created with " + event.getKey());
+ LogWriterUtils.getLogWriter().fine(" entered after created with " + event.getKey());
boolean shouldNotify = false;
Object key = event.getKey();
if (key.equals(PUTALL_KEY1)) {
@@ -857,14 +857,14 @@ public class HAEventIdPropagationDUnitTest extends DistributedTestCase
public void afterRegionDestroy(RegionEvent event)
{
- LogWriterSupport.getLogWriter().info("Before Regionestroy in Server");
+ LogWriterUtils.getLogWriter().info("Before Regionestroy in Server");
eventId = ((RegionEventImpl)event).getEventId();
assertNotNull(eventId);
synchronized (lockObject) {
receivedOperation = true;
lockObject.notify();
}
- LogWriterSupport.getLogWriter().info("After RegionDestroy in Server");
+ LogWriterUtils.getLogWriter().info("After RegionDestroy in Server");
}
public void afterRegionClear(RegionEvent event)
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIBugDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIBugDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIBugDUnitTest.java
index d98c8f4..cfcb074 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIBugDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIBugDUnitTest.java
@@ -42,9 +42,9 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -140,7 +140,7 @@ public class HAGIIBugDUnitTest extends DistributedTestCase
public void testDummy() throws Exception
{
- LogWriterSupport.getLogWriter().info("This is Dummy test for the GII");
+ LogWriterUtils.getLogWriter().info("This is Dummy test for the GII");
}
@@ -162,7 +162,7 @@ public class HAGIIBugDUnitTest extends DistributedTestCase
factory.setCacheListener(regionListener);
RegionAttributes attrs = factory.create();
Region region = cache.createRegion(REGION_NAME, attrs);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Name of the region is : " + region.getFullPath());
HARegionQueueAttributes hattr = new HARegionQueueAttributes();
@@ -187,12 +187,12 @@ public class HAGIIBugDUnitTest extends DistributedTestCase
AsyncInvocation[] async = new AsyncInvocation[4];
async[0] = vm0.invokeAsync(putFrmVm("vm0_2"));
t1.start();
- Threads.join(t1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(t1, 30 * 1000);
if (isTestFailed)
fail("HARegionQueue can not be created");
for (int count = 0; count < 1; count++) {
- Threads.join(async[count], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async[count], 30 * 1000);
if (async[count].exceptionOccurred()) {
Assert.fail("Got exception on " + count, async[count].getException());
}
@@ -208,7 +208,7 @@ public class HAGIIBugDUnitTest extends DistributedTestCase
validationFlag = true;
validateResults(validationFlag);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"No. of keys that are missed by HARegion Queue during GII "
+ keys_set_after_gii.size());
@@ -222,7 +222,7 @@ public class HAGIIBugDUnitTest extends DistributedTestCase
// int k = 0;
for (int i = 0; i < 1; i++) {
long totalPuts = ((Long)total_no_puts[i]).longValue() - 3 * NO_OF_PUTS;
- LogWriterSupport.getLogWriter().info("Total no of puts expectesd " + totalPuts);
+ LogWriterUtils.getLogWriter().info("Total no of puts expectesd " + totalPuts);
for (int j = 0; j < totalPuts; j++) {
keys_set_after_gii.add("vm" + i + "_2" + j);
@@ -243,7 +243,7 @@ public class HAGIIBugDUnitTest extends DistributedTestCase
{
HARegion regionForQueue = (HARegion)cache.getRegion(Region.SEPARATOR
+ HARegionQueue.createRegionName(HAExpiryDUnitTest.regionQueueName));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Region Queue size : " + regionForQueue.keys().size());
Iterator itr = regionForQueue.entries(false).iterator();
while (itr.hasNext()) {
@@ -319,7 +319,7 @@ public class HAGIIBugDUnitTest extends DistributedTestCase
}
TOTAL_NO_OF_PUTS = TOTAL_NO_OF_PUTS + NO_OF_PUTS;
}
- LogWriterSupport.getLogWriter().info("Total no of puts : " + TOTAL_NO_OF_PUTS);
+ LogWriterUtils.getLogWriter().info("Total no of puts : " + TOTAL_NO_OF_PUTS);
}
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIDUnitTest.java
index 8d56211..74ac4de 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HAGIIDUnitTest.java
@@ -51,7 +51,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -151,7 +151,7 @@ public class HAGIIDUnitTest extends DistributedTestCase
PORT2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
//Start the client
client0.invoke(HAGIIDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(host), new Integer(PORT1),new Integer(PORT2)});
+ new Object[] { NetworkUtils.getServerHostName(host), new Integer(PORT1),new Integer(PORT2)});
}
public void testGIIRegionQueue()
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQAddOperationJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQAddOperationJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQAddOperationJUnitTest.java
index 88dbd54..ad649d3 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQAddOperationJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQAddOperationJUnitTest.java
@@ -47,7 +47,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.cache.Conflatable;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.internal.logging.LogService;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -470,8 +470,8 @@ public class HARQAddOperationJUnitTest
t1.start();
t2.start();
- Threads.join(t1, 180 * 1000, null);
- Threads.join(t2, 180 * 1000, null);
+ ThreadUtils.join(t1, 180 * 1000);
+ ThreadUtils.join(t2, 180 * 1000);
if (testFailed)
fail("Test failed due to " + message);
@@ -528,8 +528,8 @@ public class HARQAddOperationJUnitTest
t2.start();
t1.start();
- Threads.join(t1, 180 * 1000, null);
- Threads.join(t2, 180 * 1000, null);
+ ThreadUtils.join(t1, 180 * 1000);
+ ThreadUtils.join(t2, 180 * 1000);
if (testFailed)
fail("Test failed due to " + message);
@@ -628,7 +628,7 @@ public class HARQAddOperationJUnitTest
}
for (int k = 0; k < numOfThreads; k++) {
- Threads.join(threads[k], 180 * 1000, null);
+ ThreadUtils.join(threads[k], 180 * 1000);
}
this.logWriter
@@ -711,7 +711,7 @@ public class HARQAddOperationJUnitTest
}
for (int k = 0; k < numOfThreads; k++) {
- Threads.join(threads[k], 180 * 1000, null);
+ ThreadUtils.join(threads[k], 180 * 1000);
}
if (testFailed)
@@ -779,7 +779,7 @@ public class HARQAddOperationJUnitTest
}
for (int k = 0; k < numOfThreads; k++) {
- Threads.join(threads[k], 180 * 1000, null);
+ ThreadUtils.join(threads[k], 180 * 1000);
}
if (testFailed)
@@ -853,7 +853,7 @@ public class HARQAddOperationJUnitTest
}
for (int k = 0; k < numOfThreads; k++) {
- Threads.join(threads[k], 180 * 1000, null);
+ ThreadUtils.join(threads[k], 180 * 1000);
}
if (testFailed)
@@ -902,7 +902,7 @@ public class HARQAddOperationJUnitTest
}
for (int k = 0; k < numOfPuts; k++) {
- Threads.join(threads_peek_remove[k], 180 * 1000, null);
+ ThreadUtils.join(threads_peek_remove[k], 180 * 1000);
}
if (testFailed)
@@ -965,7 +965,7 @@ public class HARQAddOperationJUnitTest
}
for (int k = 0; k < numOfThreads; k++) {
- Threads.join(threads[k], 180 * 1000, null);
+ ThreadUtils.join(threads[k], 180 * 1000);
}
if (testFailed)
@@ -1013,7 +1013,7 @@ public class HARQAddOperationJUnitTest
}
for (int k = 0; k < numOfPuts - 1; k++) {
- Threads.join(threads_peek_remove[k], 180 * 1000, null);
+ ThreadUtils.join(threads_peek_remove[k], 180 * 1000);
}
if (testFailed)
[24/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/LocatorDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/LocatorDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/LocatorDUnitTest.java
index 138d048..05d7559 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/LocatorDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/LocatorDUnitTest.java
@@ -51,15 +51,15 @@ import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.internal.logging.LocalLogWriter;
import com.gemstone.gemfire.internal.tcp.Connection;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedSystemSupport;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -105,10 +105,10 @@ public class LocatorDUnitTest extends DistributedTestCase {
// delete locator state files so they don't accidentally
// get used by other tests
if (port1 > 0) {
- DistributedSystemSupport.deleteLocatorStateFile(port1);
+ DistributedTestUtils.deleteLocatorStateFile(port1);
}
if (port2 > 0) {
- DistributedSystemSupport.deleteLocatorStateFile(port2);
+ DistributedTestUtils.deleteLocatorStateFile(port2);
}
}
@@ -131,13 +131,13 @@ public class LocatorDUnitTest extends DistributedTestCase {
VM vm3 = host.getVM(3);
port1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
+ DistributedTestUtils.deleteLocatorStateFile(port1);
- final String locators = NetworkSupport.getServerHostName(host) + "[" + port1 + "]";
+ final String locators = NetworkUtils.getServerHostName(host) + "[" + port1 + "]";
final Properties properties = new Properties();
properties.put("mcast-port", "0");
properties.put("start-locator", locators);
- properties.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ properties.put("log-level", LogWriterUtils.getDUnitLogLevel());
properties.put("security-peer-auth-init","com.gemstone.gemfire.distributed.AuthInitializer.create");
properties.put("security-peer-authenticator","com.gemstone.gemfire.distributed.MyAuthenticator.create");
properties.put(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
@@ -148,7 +148,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
DistributionManager.NORMAL_DM_TYPE, system.getDistributedMember().getVmKind());
properties.remove("start-locator");
- properties.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ properties.put("log-level", LogWriterUtils.getDUnitLogLevel());
properties.put("locators", locators);
SerializableRunnable startSystem = new SerializableRunnable("start system") {
public void run() {
@@ -207,13 +207,13 @@ public class LocatorDUnitTest extends DistributedTestCase {
});
properties.put("start-locator", locators);
- properties.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ properties.put("log-level", LogWriterUtils.getDUnitLogLevel());
system = (InternalDistributedSystem)DistributedSystem.connect(properties);
System.out.println("done connecting distributed system");
assertEquals("should be the coordinator", system.getDistributedMember(), MembershipManagerHelper.getCoordinator(system));
NetView view = MembershipManagerHelper.getMembershipManager(system).getView();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("view after becoming coordinator is " + view);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("view after becoming coordinator is " + view);
assertNotSame("should not be the first member in the view ("+view+")", system.getDistributedMember(), view.get(0));
service = DistributedLockService.create("test service", system);
@@ -261,9 +261,9 @@ public class LocatorDUnitTest extends DistributedTestCase {
this.port1 = port1;
final int port2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
this.port2 = port2; // for cleanup in tearDown2
- DistributedSystemSupport.deleteLocatorStateFile(port1);
- DistributedSystemSupport.deleteLocatorStateFile(port2);
- final String host0 = NetworkSupport.getServerHostName(host);
+ DistributedTestUtils.deleteLocatorStateFile(port1);
+ DistributedTestUtils.deleteLocatorStateFile(port2);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]," +
host0 + "[" + port2 + "]";
final Properties properties = new Properties();
@@ -272,7 +272,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
properties.put("enable-network-partition-detection", "false");
properties.put("disable-auto-reconnect", "true");
properties.put("member-timeout", "2000");
- properties.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ properties.put("log-level", LogWriterUtils.getDUnitLogLevel());
properties.put(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
SerializableCallable startLocator1 = new SerializableCallable("start locator1") {
@@ -316,7 +316,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
if (async1 != null) {
async1.join(45000);
if (async1.isAlive()) {
- Threads.dumpAllStacks();
+ ThreadUtils.dumpAllStacks();
}
if (async2 != null) {
async2.join();
@@ -371,8 +371,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
VM vm3 = host.getVM(3);
port1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
- final String locators = NetworkSupport.getServerHostName(host) + "[" + port1 + "]";
+ DistributedTestUtils.deleteLocatorStateFile(port1);
+ final String locators = NetworkUtils.getServerHostName(host) + "[" + port1 + "]";
final Properties properties = new Properties();
properties.put("mcast-port", "0");
properties.put("locators", locators);
@@ -494,8 +494,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
this.port1 = port1;
final int port2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
this.port2 = port2; // for cleanup in tearDown2()
- DistributedSystemSupport.deleteLocatorStateFile(port1, port2);
- final String host0 = NetworkSupport.getServerHostName(host);
+ DistributedTestUtils.deleteLocatorStateFile(port1, port2);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]," +
host0 + "[" + port2 + "]";
final Properties properties = new Properties();
@@ -504,7 +504,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
properties.put("enable-network-partition-detection", "true");
properties.put("disable-auto-reconnect", "true");
properties.put("member-timeout", "2000");
- properties.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ properties.put("log-level", LogWriterUtils.getDUnitLogLevel());
// properties.put("log-level", "fine");
properties.put(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
@@ -553,7 +553,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
assertEquals(sys.getDistributedMember(), MembershipManagerHelper.getCoordinator(sys));
// crash the second vm and the locator. Should be okay
- DistributedSystemSupport.crashDistributedSystem(vm2);
+ DistributedTestUtils.crashDistributedSystem(vm2);
locvm.invoke(crashLocator);
assertTrue("Distributed system should not have disconnected",
@@ -566,14 +566,14 @@ public class LocatorDUnitTest extends DistributedTestCase {
// disconnect the first vm and demonstrate that the third vm and the
// locator notice the failure and exit
- DistributedSystemSupport.crashDistributedSystem(vm1);
+ DistributedTestUtils.crashDistributedSystem(vm1);
/* This vm is watching vm1, which is watching vm2 which is watching locvm.
* It will take 3 * (3 * member-timeout) milliseconds to detect the full
* failure and eject the lost members from the view.
*/
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("waiting for my distributed system to disconnect due to partition detection");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("waiting for my distributed system to disconnect due to partition detection");
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
return !sys.isConnected();
@@ -629,8 +629,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
this.port1 = port1;
final int port2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
this.port2 = port2;
- DistributedSystemSupport.deleteLocatorStateFile(port1, port2);
- final String host0 = NetworkSupport.getServerHostName(host);
+ DistributedTestUtils.deleteLocatorStateFile(port1, port2);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "],"
+ host0 + "[" + port2 + "]";
final Properties properties = new Properties();
@@ -706,7 +706,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
if (!Locator.getLocators().isEmpty()) {
// log this for debugging purposes before throwing assertion error
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().warning("found locator " + Locator.getLocators().iterator().next());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().warning("found locator " + Locator.getLocators().iterator().next());
}
assertTrue("locator is not stopped", Locator.getLocators().isEmpty());
@@ -744,7 +744,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
try {
locvm.invoke(stopLocator);
} catch (Exception e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().severe("failed to stop locator in vm 3", e);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().severe("failed to stop locator in vm 3", e);
}
}
}
@@ -776,8 +776,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
this.port1 = port1;
final int port2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
this.port2 = port2;
- DistributedSystemSupport.deleteLocatorStateFile(port1, port2);
- final String host0 = NetworkSupport.getServerHostName(host);
+ DistributedTestUtils.deleteLocatorStateFile(port1, port2);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]," + host0 + "[" + port2 + "]";
final Properties properties = new Properties();
@@ -786,7 +786,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
properties.put("enable-network-partition-detection", "true");
properties.put("disable-auto-reconnect", "true");
properties.put("member-timeout", "2000");
- properties.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ properties.put("log-level", LogWriterUtils.getDUnitLogLevel());
properties.put(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
SerializableRunnable stopLocator = getStopLocatorRunnable();
@@ -915,8 +915,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
this.port1 = port1;
final int port2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
this.port2 = port2;
- DistributedSystemSupport.deleteLocatorStateFile(port1, port2);
- final String host0 = NetworkSupport.getServerHostName(host);
+ DistributedTestUtils.deleteLocatorStateFile(port1, port2);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "],"
+ host0 + "[" + port2 + "]";
final Properties properties = new Properties();
@@ -1036,8 +1036,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
Host host = Host.getHost(0);
int port =
AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
- String locators = NetworkSupport.getServerHostName(host) + "[" + port + "]";
+ DistributedTestUtils.deleteLocatorStateFile(port1);
+ String locators = NetworkUtils.getServerHostName(host) + "[" + port + "]";
Properties props = new Properties();
props.setProperty("mcast-port", "0");
props.setProperty("locators", locators);
@@ -1099,8 +1099,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
final int port =
AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
- final String locators = NetworkSupport.getServerHostName(host) + "[" + port + "]";
+ DistributedTestUtils.deleteLocatorStateFile(port1);
+ final String locators = NetworkUtils.getServerHostName(host) + "[" + port + "]";
final String uniqueName = getUniqueName();
vm0.invoke(new SerializableRunnable("Start locator " + locators) {
@@ -1139,7 +1139,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
system = (InternalDistributedSystem)DistributedSystem.connect(props);
final DistributedMember coord = MembershipManagerHelper.getCoordinator(system);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("coordinator before termination of locator is " + coord);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("coordinator before termination of locator is " + coord);
vm0.invoke(getStopLocatorRunnable());
@@ -1155,7 +1155,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
};
Wait.waitForCriterion(ev, 15 * 1000, 200, true);
DistributedMember newCoord = MembershipManagerHelper.getCoordinator(system);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("coordinator after shutdown of locator was " +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("coordinator after shutdown of locator was " +
newCoord);
if (coord.equals(newCoord)) {
fail("another member should have become coordinator after the locator was stopped");
@@ -1211,8 +1211,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
final int port =
AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
- final String locators = NetworkSupport.getServerHostName(host) + "[" + port + "]";
+ DistributedTestUtils.deleteLocatorStateFile(port1);
+ final String locators = NetworkUtils.getServerHostName(host) + "[" + port + "]";
vm0.invoke(getStartSBLocatorRunnable(port, getUniqueName()+"1"));
try {
@@ -1236,7 +1236,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
system = (InternalDistributedSystem)getSystem(props);
final DistributedMember coord = MembershipManagerHelper.getCoordinator(system);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("coordinator before termination of locator is " + coord);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("coordinator before termination of locator is " + coord);
vm0.invoke(getStopLocatorRunnable());
@@ -1252,7 +1252,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
};
Wait.waitForCriterion(ev, 15000, 200, true);
DistributedMember newCoord = MembershipManagerHelper.getCoordinator(system);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("coordinator after shutdown of locator was " +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("coordinator after shutdown of locator was " +
newCoord);
if (newCoord == null || coord.equals(newCoord)) {
fail("another member should have become coordinator after the locator was stopped: "
@@ -1344,8 +1344,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
this.port1 = port1;
final int port2 = freeTCPPorts[1];
this.port2 = port2;
- DistributedSystemSupport.deleteLocatorStateFile(port1, port2);
- final String host0 = NetworkSupport.getServerHostName(host);
+ DistributedTestUtils.deleteLocatorStateFile(port1, port2);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]," +
host0 + "[" + port2 + "]";
@@ -1460,10 +1460,10 @@ public class LocatorDUnitTest extends DistributedTestCase {
this.port1 = port1;
final int port2 = freeTCPPorts[1];
this.port2 = port2;
- DistributedSystemSupport.deleteLocatorStateFile(port1, port2);
+ DistributedTestUtils.deleteLocatorStateFile(port1, port2);
final int mcastport = AvailablePort.getRandomAvailablePort(AvailablePort.MULTICAST);
- final String host0 = NetworkSupport.getServerHostName(host);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]," +
host0 + "[" + port2 + "]";
final String uniqueName = getUniqueName();
@@ -1475,7 +1475,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty("mcast-port", String.valueOf(mcastport));
props.setProperty("locators", locators);
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
props.setProperty("mcast-ttl", "0");
props.setProperty("enable-network-partition-detection", "true");
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
@@ -1494,7 +1494,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty("mcast-port", String.valueOf(mcastport));
props.setProperty("locators", locators);
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
props.setProperty("mcast-ttl", "0");
props.setProperty("enable-network-partition-detection", "true");
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
@@ -1512,7 +1512,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty("mcast-port", String.valueOf(mcastport));
props.setProperty("locators", locators);
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
props.setProperty("mcast-ttl", "0");
props.setProperty("enable-network-partition-detection", "true");
DistributedSystem.connect(props);
@@ -1525,7 +1525,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty("mcast-port", String.valueOf(mcastport));
props.setProperty("locators", locators);
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
props.setProperty("mcast-ttl", "0");
props.setProperty("enable-network-partition-detection", "true");
@@ -1580,12 +1580,12 @@ public class LocatorDUnitTest extends DistributedTestCase {
port1 =
AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
+ DistributedTestUtils.deleteLocatorStateFile(port1);
File logFile = new File("");
Locator locator = Locator.startLocator(port1, logFile);
try {
- final String locators = NetworkSupport.getServerHostName(host) + "[" + port1 + "]";
+ final String locators = NetworkUtils.getServerHostName(host) + "[" + port1 + "]";
Properties props = new Properties();
props.setProperty("mcast-port", "0");
@@ -1610,8 +1610,8 @@ public class LocatorDUnitTest extends DistributedTestCase {
try {
port1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
- final String locators = NetworkSupport.getServerHostName(host) + "[" + port1 + "]";
+ DistributedTestUtils.deleteLocatorStateFile(port1);
+ final String locators = NetworkUtils.getServerHostName(host) + "[" + port1 + "]";
final Properties properties = new Properties();
properties.put("mcast-port", "0");
properties.put("locators", locators);
@@ -1687,7 +1687,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
port1 =
AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
File logFile1 = new File("");
- DistributedSystemSupport.deleteLocatorStateFile(port1);
+ DistributedTestUtils.deleteLocatorStateFile(port1);
Locator locator1 = Locator.startLocator(port1, logFile1);
try {
@@ -1696,7 +1696,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
File logFile2 = new File("");
- DistributedSystemSupport.deleteLocatorStateFile(port2);
+ DistributedTestUtils.deleteLocatorStateFile(port2);
try {
Locator locator2 = Locator.startLocator(port2, logFile2);
@@ -1704,7 +1704,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
} catch (IllegalStateException expected) {
}
- final String host0 = NetworkSupport.getServerHostName(host);
+ final String host0 = NetworkUtils.getServerHostName(host);
final String locators = host0 + "[" + port1 + "]," +
host0 + "[" + port2 + "]";
@@ -1714,7 +1714,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty("mcast-port", "0");
props.setProperty("locators", locators);
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
DistributedSystem.connect(props);
}
};
@@ -1749,7 +1749,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
disconnectAllFromDS();
port1 =
AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- DistributedSystemSupport.deleteLocatorStateFile(port1);
+ DistributedTestUtils.deleteLocatorStateFile(port1);
File logFile = new File("");
File stateFile = new File("locator"+port1+"state.dat");
VM vm0 = Host.getHost(0).getVM(0);
@@ -1761,7 +1761,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
stateFile.delete();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Starting locator");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Starting locator");
Locator locator = Locator.startLocatorAndDS(port1, logFile, p);
try {
@@ -1773,10 +1773,10 @@ public class LocatorDUnitTest extends DistributedTestCase {
};
vm0.invoke(connect);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Stopping locator");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Stopping locator");
locator.stop();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Starting locator");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Starting locator");
locator = Locator.startLocatorAndDS(port1, logFile, p);
vm0.invoke(new SerializableRunnable("disconnect") {
@@ -1835,7 +1835,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
System.setProperty("p2p.joinTimeout", "1000");
Properties locProps = new Properties();
locProps.put("mcast-port", "0");
- locProps.put("log-level", LogWriterSupport.getDUnitLogLevel());
+ locProps.put("log-level", LogWriterUtils.getDUnitLogLevel());
Locator.startLocatorAndDS(port, logFile, locProps);
} catch (IOException ex) {
com.gemstone.gemfire.test.dunit.Assert.fail("While starting locator on port " + port, ex);
@@ -1902,7 +1902,7 @@ public class LocatorDUnitTest extends DistributedTestCase {
public void quorumLost(Set<InternalDistributedMember> failures,
List<InternalDistributedMember> remaining) {
quorumLostInvoked = true;
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("quorumLost invoked in test code");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("quorumLost invoked in test code");
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/SystemAdminDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/SystemAdminDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/SystemAdminDUnitTest.java
index ef45acb..e80142a 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/SystemAdminDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/SystemAdminDUnitTest.java
@@ -30,8 +30,8 @@ import com.gemstone.gemfire.distributed.internal.DistributionManager;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.SystemAdmin;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class SystemAdminDUnitTest extends DistributedTestCase {
@@ -56,7 +56,7 @@ public class SystemAdminDUnitTest extends DistributedTestCase {
system = null;
InternalDistributedSystem sys = InternalDistributedSystem.getAnyInstance();
if (sys != null && sys.isConnected()) {
- LogWriterSupport.getLogWriter().info("disconnecting(3)");
+ LogWriterUtils.getLogWriter().info("disconnecting(3)");
sys.disconnect();
}
}
@@ -64,7 +64,7 @@ public class SystemAdminDUnitTest extends DistributedTestCase {
public void testPrintStacks() throws Exception {
// create a gemfire.properties that lets SystemAdmin find the dunit locator
- Properties p = DistributedTestSupport.getAllDistributedSystemProperties(getDistributedSystemProperties());
+ Properties p = DistributedTestUtils.getAllDistributedSystemProperties(getDistributedSystemProperties());
try {
SystemAdmin.setDistributedSystemProperties(p);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/ConsoleDistributionManagerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/ConsoleDistributionManagerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/ConsoleDistributionManagerDUnitTest.java
index cfc978f..166d5ac 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/ConsoleDistributionManagerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/ConsoleDistributionManagerDUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.internal.admin.StatResource;
import com.gemstone.gemfire.internal.admin.remote.RemoteTransportConfig;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -67,7 +67,7 @@ public class ConsoleDistributionManagerDUnitTest
// private volatile Alert lastAlert = null;
public void alert(Alert alert) {
- LogWriterSupport.getLogWriter().info("DEBUG: alert=" + alert);
+ LogWriterUtils.getLogWriter().info("DEBUG: alert=" + alert);
// this.lastAlert = alert;
}
@@ -97,7 +97,7 @@ public class ConsoleDistributionManagerDUnitTest
}
// create a GfManagerAgent in the master vm.
this.agent = GfManagerAgentFactory.
- getManagerAgent(new GfManagerAgentConfig(null, transport, LogWriterSupport.getLogWriter(), Alert.SEVERE, this, null));
+ getManagerAgent(new GfManagerAgentConfig(null, transport, LogWriterUtils.getLogWriter(), Alert.SEVERE, this, null));
if (!agent.isConnected()) {
WaitCriterion ev = new WaitCriterion() {
public boolean done() {
@@ -248,7 +248,7 @@ public class ConsoleDistributionManagerDUnitTest
Region[] roots = apps[i].getRootRegions();
if (roots.length == 0) {
- LogWriterSupport.getLogWriter().info("DEBUG: testApplications: apps[" + i + "]=" + apps[i] + " did not have a root region");
+ LogWriterUtils.getLogWriter().info("DEBUG: testApplications: apps[" + i + "]=" + apps[i] + " did not have a root region");
} else {
Region root = roots[0];
assertNotNull(root);
@@ -289,7 +289,7 @@ public class ConsoleDistributionManagerDUnitTest
assertTrue(!node.isPrimitiveOrString());
EntryValueNode[] fields = node.getChildren();
assertNotNull(fields);
- LogWriterSupport.getLogWriter().warning("The tests use StringBuffers for values which might be implmented differently in jdk 1.5");
+ LogWriterUtils.getLogWriter().warning("The tests use StringBuffers for values which might be implmented differently in jdk 1.5");
// assertTrue(fields.length > 0);
/// test destruction in the last valid app
@@ -366,7 +366,7 @@ public class ConsoleDistributionManagerDUnitTest
region.create(entryName, value);
- LogWriterSupport.getLogWriter().info("Put value " + value + " in entry " +
+ LogWriterUtils.getLogWriter().info("Put value " + value + " in entry " +
entryName + " in region '" +
region.getFullPath() +"'");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/DistributionManagerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/DistributionManagerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/DistributionManagerDUnitTest.java
index cdd81f3..2164fdc 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/DistributionManagerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/DistributionManagerDUnitTest.java
@@ -49,7 +49,7 @@ import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -151,7 +151,7 @@ public class DistributionManagerDUnitTest extends DistributedTestCase {
mgr = MembershipManagerHelper.getMembershipManager(sys);
sys.disconnect();
InternalDistributedMember idm2 = mgr.getLocalMember();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("original ID=" + idm +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("original ID=" + idm +
" and after connecting=" + idm2);
assertTrue("should not have used a different udp port",
idm.getPort() == idm2.getPort());
@@ -176,7 +176,7 @@ public class DistributionManagerDUnitTest extends DistributedTestCase {
try {
InternalDistributedMember mbr = new InternalDistributedMember(
- NetworkSupport.getIPLiteral(), 12345);
+ NetworkUtils.getIPLiteral(), 12345);
// first make sure we can't add this as a surprise member (bug #44566)
@@ -186,8 +186,8 @@ public class DistributionManagerDUnitTest extends DistributedTestCase {
int oldViewId = mbr.getVmViewId();
mbr.setVmViewId((int)mgr.getView().getViewId()-1);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("current membership view is " + mgr.getView());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("created ID " + mbr + " with view ID " + mbr.getVmViewId());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("current membership view is " + mgr.getView());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("created ID " + mbr + " with view ID " + mbr.getVmViewId());
sys.getLogWriter().info("<ExpectedException action=add>attempt to add old member</ExpectedException>");
sys.getLogWriter().info("<ExpectedException action=add>Removing shunned GemFire node</ExpectedException>");
try {
@@ -525,14 +525,14 @@ public class DistributionManagerDUnitTest extends DistributedTestCase {
try {
getSystem(props);
} catch (IllegalArgumentException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("caught expected exception (1)", e);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("caught expected exception (1)", e);
}
// use an invalid address
props.setProperty(DistributionConfig.BIND_ADDRESS_NAME, "bruce.schuchardt");
try {
getSystem(props);
} catch (IllegalArgumentException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("caught expected exception (2_", e);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("caught expected exception (2_", e);
}
// use a valid bind address
props.setProperty(DistributionConfig.BIND_ADDRESS_NAME, InetAddress.getLocalHost().getCanonicalHostName());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/deadlock/GemFireDeadlockDetectorDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/deadlock/GemFireDeadlockDetectorDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/deadlock/GemFireDeadlockDetectorDUnitTest.java
index 1574ec0..bc3bee6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/deadlock/GemFireDeadlockDetectorDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/deadlock/GemFireDeadlockDetectorDUnitTest.java
@@ -37,7 +37,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -117,7 +117,7 @@ public class GemFireDeadlockDetectorDUnitTest extends CacheTestCase {
Thread.sleep(5000);
GemFireDeadlockDetector detect = new GemFireDeadlockDetector();
LinkedList<Dependency> deadlock = detect.find().findCycle();
- LogWriterSupport.getLogWriter().info("Deadlock=" + DeadlockDetector.prettyFormat(deadlock));
+ LogWriterUtils.getLogWriter().info("Deadlock=" + DeadlockDetector.prettyFormat(deadlock));
assertEquals(8, deadlock.size());
stopStuckThreads();
async1.getResult(30000);
@@ -164,7 +164,7 @@ public class GemFireDeadlockDetectorDUnitTest extends CacheTestCase {
}
assertTrue(deadlock != null);
- LogWriterSupport.getLogWriter().info("Deadlock=" + DeadlockDetector.prettyFormat(deadlock));
+ LogWriterUtils.getLogWriter().info("Deadlock=" + DeadlockDetector.prettyFormat(deadlock));
assertEquals(4, deadlock.size());
stopStuckThreads();
disconnectAllFromDS();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/locks/CollaborationJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/locks/CollaborationJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/locks/CollaborationJUnitTest.java
index c570482..6985044 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/locks/CollaborationJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/distributed/internal/locks/CollaborationJUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.CancelCriterion;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.SystemFailure;
import com.gemstone.gemfire.internal.logging.LocalLogWriter;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@@ -154,7 +154,7 @@ public class CollaborationJUnitTest {
// let threadA release so that threadB gets lock
this.flagTestBlocksUntilRelease = false;
- Threads.join(threadA, 30 * 1000, null);
+ ThreadUtils.join(threadA, 30 * 1000);
// make sure threadB is doing what it's supposed to do...
ev = new WaitCriterion() {
@@ -171,7 +171,7 @@ public class CollaborationJUnitTest {
// threadB must have lock now... let threadB release
assertTrue(this.collaboration.hasCurrentTopic(threadB));
this.flagTestBlocksUntilRelease = false;
- Threads.join(threadB, 30 * 1000, null);
+ ThreadUtils.join(threadB, 30 * 1000);
// collaboration should be free now
assertFalse(this.collaboration.hasCurrentTopic(threadA));
@@ -342,7 +342,7 @@ public class CollaborationJUnitTest {
// release threadA
this.threadAFlag_TestLateComerJoinsIn = false;
- Threads.join(threadA, 30 * 1000, null);
+ ThreadUtils.join(threadA, 30 * 1000);
assertFalse(this.collaboration.hasCurrentTopic(threadA));
assertTrue(this.collaboration.hasCurrentTopic(threadB));
assertFalse(this.collaboration.hasCurrentTopic(threadC));
@@ -352,7 +352,7 @@ public class CollaborationJUnitTest {
// release threadB
this.threadBFlag_TestLateComerJoinsIn = false;
- Threads.join(threadB, 30 * 1000, null);
+ ThreadUtils.join(threadB, 30 * 1000);
assertFalse(this.collaboration.hasCurrentTopic(threadB));
assertFalse(this.collaboration.hasCurrentTopic(threadC));
assertTrue(this.collaboration.hasCurrentTopic(threadD));
@@ -361,7 +361,7 @@ public class CollaborationJUnitTest {
// release threadD
this.threadDFlag_TestLateComerJoinsIn = false;
- Threads.join(threadD, 30 * 1000, null);
+ ThreadUtils.join(threadD, 30 * 1000);
ev = new WaitCriterion() {
@Override
public boolean done() {
@@ -380,7 +380,7 @@ public class CollaborationJUnitTest {
// release threadC
this.threadCFlag_TestLateComerJoinsIn = false;
- Threads.join(threadC, 30 * 1000, null);
+ ThreadUtils.join(threadC, 30 * 1000);
assertFalse(this.collaboration.hasCurrentTopic(threadC));
assertFalse(this.collaboration.isCurrentTopic(topicA));
assertFalse(this.collaboration.isCurrentTopic(topicB));
@@ -464,7 +464,7 @@ public class CollaborationJUnitTest {
}
for (int t = 0; t < threads.length; t++) {
- Threads.join(threads[t], 30 * 1000, null);
+ ThreadUtils.join(threads[t], 30 * 1000);
}
// assert that all topics are acquired in order
@@ -579,7 +579,7 @@ public class CollaborationJUnitTest {
// after starting thread, hasCurrentTopic(thread) returns true
assertTrue(this.collaboration.hasCurrentTopic(thread));
this.flagTestThreadHasCurrentTopic = false;
- Threads.join(thread, 30 * 1000, null);
+ ThreadUtils.join(thread, 30 * 1000);
// after thread finishes, hasCurrentTopic(thread) returns false
assertTrue(!this.collaboration.hasCurrentTopic(thread));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXDebugDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXDebugDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXDebugDUnitTest.java
index ece49c6..db36aac 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXDebugDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXDebugDUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.internal.cache.control.InternalResourceManager;
import com.gemstone.gemfire.internal.cache.execute.CustomerIDPartitionResolver;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -127,7 +127,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
assertNotNull(cache);
Region pr = cache.createRegion(partitionedRegionName, attr.create());
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + partitionedRegionName
+ " created Successfully :" + pr.toString());
}
@@ -148,7 +148,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
assertNotNull(cache);
Region pr = cache.getRegion(partitionedRegionName);
assertNotNull(pr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Destroying Partitioned Region " + partitionedRegionName);
pr.destroyRegion();
}
@@ -165,7 +165,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
// af.create());
Region rr = cache.createRegion(replicatedRegionName, af.create());
assertNotNull(rr);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Replicated Region " + replicatedRegionName + " created Successfully :"
+ rr.toString());
}
@@ -196,7 +196,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.put");
+ LogWriterUtils.getLogWriter().info(" calling pr.put");
pr1.put(dummy, "1_entry__" + i);
}
@@ -209,7 +209,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.put in tx 1");
+ LogWriterUtils.getLogWriter().info(" calling pr.put in tx 1");
pr1.put(dummy, "2_entry__" + i);
}
ctx.commit();
@@ -218,7 +218,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get");
+ LogWriterUtils.getLogWriter().info(" calling pr.get");
assertEquals("2_entry__" + i, pr1.get(dummy));
}
@@ -227,7 +227,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.put in tx 2");
+ LogWriterUtils.getLogWriter().info(" calling pr.put in tx 2");
pr1.put(dummy, "3_entry__" + i);
}
ctx.rollback();
@@ -236,7 +236,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get");
+ LogWriterUtils.getLogWriter().info(" calling pr.get");
assertEquals("2_entry__" + i, pr1.get(dummy));
}
@@ -245,7 +245,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.destroy in tx 3");
+ LogWriterUtils.getLogWriter().info(" calling pr.destroy in tx 3");
pr1.destroy(dummy);
}
ctx.commit();
@@ -254,7 +254,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get");
+ LogWriterUtils.getLogWriter().info(" calling pr.get");
assertEquals(null, pr1.get(dummy));
}
@@ -264,7 +264,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
public Object call() throws CacheException {
PartitionedRegion pr1 = (PartitionedRegion) cache
.getRegion("pregion1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.getLocalSize " + pr1.getLocalSize());
assertEquals(0, pr1.getLocalSize());
return null;
@@ -303,7 +303,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 6; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling non-tx put");
+ LogWriterUtils.getLogWriter().info(" calling non-tx put");
pr1.put(dummy, "1_entry__" + i);
rr1.put(dummy, "1_entry__" + i);
}
@@ -315,19 +315,19 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr1.destroy in tx key=" + dummy);
pr1.destroy(dummy);
- LogWriterSupport.getLogWriter().info(" calling rr1.destroy in tx key=" + i);
+ LogWriterUtils.getLogWriter().info(" calling rr1.destroy in tx key=" + i);
rr1.destroy(dummy);
}
for (int i = 4; i <= 6; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr1.invalidate in tx key=" + dummy);
pr1.invalidate(dummy);
- LogWriterSupport.getLogWriter().info(" calling rr1.invalidate in tx key=" + i);
+ LogWriterUtils.getLogWriter().info(" calling rr1.invalidate in tx key=" + i);
rr1.invalidate(dummy);
}
ctx.commit();
@@ -336,9 +336,9 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 6; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr1.get");
+ LogWriterUtils.getLogWriter().info(" calling pr1.get");
assertEquals(null, pr1.get(dummy));
- LogWriterSupport.getLogWriter().info(" calling rr1.get");
+ LogWriterUtils.getLogWriter().info(" calling rr1.get");
assertEquals(null, rr1.get(i));
}
return null;
@@ -353,10 +353,10 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
public Object call() throws CacheException {
PartitionedRegion pr1 = (PartitionedRegion) cache.getRegion("pregion1");
Region rr1 = cache.getRegion("rregion1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr1.getLocalSize " + pr1.getLocalSize());
assertEquals(2, pr1.getLocalSize());
- LogWriterSupport.getLogWriter().info(" calling rr1.size " + rr1.size());
+ LogWriterUtils.getLogWriter().info(" calling rr1.size " + rr1.size());
assertEquals(3, rr1.size());
return null;
}
@@ -391,9 +391,9 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.put non-tx PR1_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling pr.put non-tx PR1_entry__" + i);
pr1.put(dummy, "PR1_entry__" + i);
- LogWriterSupport.getLogWriter().info(" calling rr.put non-tx RR1_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling rr.put non-tx RR1_entry__" + i);
rr1.put(new Integer(i), "RR1_entry__" + i);
}
@@ -406,9 +406,9 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.put in tx PR2_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling pr.put in tx PR2_entry__" + i);
pr1.put(dummy, "PR2_entry__" + i);
- LogWriterSupport.getLogWriter().info(" calling rr.put in tx RR2_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling rr.put in tx RR2_entry__" + i);
rr1.put(new Integer(i), "RR2_entry__" + i);
}
ctx.commit();
@@ -417,9 +417,9 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get PR2_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling pr.get PR2_entry__" + i);
assertEquals("PR2_entry__" + i, pr1.get(dummy));
- LogWriterSupport.getLogWriter().info(" calling rr.get RR2_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling rr.get RR2_entry__" + i);
assertEquals("RR2_entry__" + i, rr1.get(new Integer(i)));
}
return null;
@@ -433,12 +433,12 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
@Override
public Object call() throws CacheException {
PartitionedRegion pr1 = (PartitionedRegion) cache.getRegion("pregion1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.getLocalSize " + pr1.getLocalSize());
assertEquals(2, pr1.getLocalSize());
Region rr1 = cache.getRegion("rregion1");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(" calling rr.getLocalSize " + rr1.size());
assertEquals(3, rr1.size());
return null;
@@ -475,7 +475,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.put in tx 1");
+ LogWriterUtils.getLogWriter().info(" calling pr.put in tx 1");
pr1.put(dummy, "2_entry__" + i);
}
ctx.commit();
@@ -484,7 +484,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
+ LogWriterUtils.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
assertEquals("2_entry__" + i, pr1.get(dummy));
}
return null;
@@ -498,7 +498,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
public Object call() throws CacheException {
PartitionedRegion pr1 = (PartitionedRegion) cache.getRegion("pregion1");
CacheTransactionManager ctx = cache.getCacheTransactionManager();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.getLocalSize " + pr1.getLocalSize());
assertEquals(2, pr1.getLocalSize());
return null;
@@ -526,7 +526,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.put in tx for rollback no_entry__" + i);
pr1.put(dummy, "no_entry__" + i);
}
@@ -536,7 +536,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.get after rollback " + pr1.get(dummy));
assertEquals("2_entry__" + i, pr1.get(dummy));
}
@@ -570,10 +570,10 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.create in tx 1");
+ LogWriterUtils.getLogWriter().info(" calling pr.create in tx 1");
pr1.create(dummy, "2_entry__" + i);
- LogWriterSupport.getLogWriter().info(" calling rr.create " + "2_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling rr.create " + "2_entry__" + i);
rr1.create(new Integer(i), "2_entry__" + i);
}
ctx.commit();
@@ -582,10 +582,10 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
+ LogWriterUtils.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
assertEquals("2_entry__" + i, pr1.get(dummy));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling rr.get " + rr1.get(new Integer(i)));
assertEquals("2_entry__" + i, rr1.get(new Integer(i)));
}
@@ -600,12 +600,12 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
@Override
public Object call() throws CacheException {
Region rr1 = cache.getRegion("rregion1");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(" calling rr.getLocalSize " + rr1.size());
assertEquals(3, rr1.size());
PartitionedRegion pr1 = (PartitionedRegion) cache.getRegion("pregion1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.getLocalSize " + pr1.getLocalSize());
assertEquals(2, pr1.getLocalSize());
return null;
@@ -650,10 +650,10 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
+ LogWriterUtils.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
assertEquals("2_entry__" + i, pr1.get(dummy));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling rr.get " + rr1.get(new Integer(i)));
assertEquals("2_entry__" + i, rr1.get(new Integer(i)));
}
@@ -668,12 +668,12 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
@Override
public Object call() throws CacheException {
Region rr1 = cache.getRegion("rregion1");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(" calling rr.getLocalSize " + rr1.size());
assertEquals(3, rr1.size());
PartitionedRegion pr1 = (PartitionedRegion) cache.getRegion("pregion1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.getLocalSize " + pr1.getLocalSize());
assertEquals(2, pr1.getLocalSize());
return null;
@@ -714,7 +714,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
+ LogWriterUtils.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
assertEquals("2_entry__" + i, pr1.get(dummy));
}
@@ -730,7 +730,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
@Override
public Object call() throws CacheException {
PartitionedRegion pr1 = (PartitionedRegion) cache.getRegion("pregion1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.getLocalSize " + pr1.getLocalSize());
assertEquals(2, pr1.getLocalSize());
return null;
@@ -781,7 +781,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
// verify the data
for (int i = 1; i <= 3; i++) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling rr.get " + rr1.get(new Integer(i)));
assertEquals(null, rr1.get(new Integer(i)));
}
@@ -800,7 +800,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
@Override
public Object call() throws CacheException {
Region rr1 = cache.getRegion("rregion1");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(" calling rr.getLocalSize " + rr1.size());
assertEquals(0, rr1.size());
return null;
@@ -841,7 +841,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
for (int i = 1; i <= 3; i++) {
DummyKeyBasedRoutingResolver dummy = new DummyKeyBasedRoutingResolver(
i);
- LogWriterSupport.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
+ LogWriterUtils.getLogWriter().info(" calling pr.get " + pr1.get(dummy));
assertEquals(null, pr1.get(dummy));
}
return null;
@@ -855,7 +855,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
@Override
public Object call() throws CacheException {
PartitionedRegion pr1 = (PartitionedRegion) cache.getRegion("pregion1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling pr.getLocalSize " + pr1.getLocalSize());
assertEquals(0, pr1.getLocalSize());
return null;
@@ -886,14 +886,14 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
ctx.setDistributed(true);
ctx.begin();
for (int i = 1; i <= 3; i++) {
- LogWriterSupport.getLogWriter().info(" calling rr.put " + "2_entry__" + i);
+ LogWriterUtils.getLogWriter().info(" calling rr.put " + "2_entry__" + i);
rr1.put(new Integer(i), "2_entry__" + i);
}
ctx.commit();
// verify the data
for (int i = 1; i <= 3; i++) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling rr.get " + rr1.get(new Integer(i)));
assertEquals("2_entry__" + i, rr1.get(new Integer(i)));
}
@@ -912,7 +912,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
@Override
public Object call() throws CacheException {
Region rr1 = cache.getRegion("rregion1");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(" calling rr.getLocalSize " + rr1.size());
assertEquals(3, rr1.size());
return null;
@@ -930,7 +930,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
ctx.setDistributed(true);
ctx.begin();
for (int i = 1; i <= 3; i++) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling rr.put for rollback no_entry__" + i);
rr1.put(new Integer(i), "no_entry__" + i);
}
@@ -939,7 +939,7 @@ public class DistTXDebugDUnitTest extends CacheTestCase {
// verify the data
for (int i = 1; i <= 3; i++) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
" calling rr.get after rollback "
+ rr1.get(new Integer(i)));
assertEquals("2_entry__" + i, rr1.get(new Integer(i)));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXPersistentDebugDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXPersistentDebugDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXPersistentDebugDUnitTest.java
index 997effa..2cfeae7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXPersistentDebugDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistTXPersistentDebugDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.internal.cache.TXManagerImpl;
import com.gemstone.gemfire.internal.cache.execute.data.CustId;
import com.gemstone.gemfire.internal.cache.execute.data.Customer;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
public class DistTXPersistentDebugDUnitTest extends DistTXDebugDUnitTest {
@@ -107,21 +107,21 @@ public class DistTXPersistentDebugDUnitTest extends DistTXDebugDUnitTest {
public Object call() throws Exception {
CacheTransactionManager mgr = cache.getCacheTransactionManager();
mgr.setDistributed(true);
- LogWriterSupport.getLogWriter().fine("SJ:TX BEGIN");
+ LogWriterUtils.getLogWriter().fine("SJ:TX BEGIN");
mgr.begin();
Region<CustId, Customer> prRegion = cache.getRegion(regionName);
CustId custIdOne = new CustId(1);
Customer customerOne = new Customer("name1", "addr1");
- LogWriterSupport.getLogWriter().fine("SJ:TX PUT 1");
+ LogWriterUtils.getLogWriter().fine("SJ:TX PUT 1");
prRegion.put(custIdOne, customerOne);
CustId custIdTwo = new CustId(2);
Customer customerTwo = new Customer("name2", "addr2");
- LogWriterSupport.getLogWriter().fine("SJ:TX PUT 2");
+ LogWriterUtils.getLogWriter().fine("SJ:TX PUT 2");
prRegion.put(custIdTwo, customerTwo);
- LogWriterSupport.getLogWriter().fine("SJ:TX COMMIT");
+ LogWriterUtils.getLogWriter().fine("SJ:TX COMMIT");
mgr.commit();
return null;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistributedTransactionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistributedTransactionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistributedTransactionDUnitTest.java
index 928a7bb..2eaef9c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistributedTransactionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/disttx/DistributedTransactionDUnitTest.java
@@ -60,7 +60,7 @@ import com.gemstone.gemfire.internal.cache.execute.data.OrderId;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -1952,7 +1952,7 @@ public class DistributedTransactionDUnitTest extends CacheTestCase {
public Exception ex = new Exception();
public void run() {
- LogWriterSupport.getLogWriter().info("Inside TxConflictRunnable.TxThread after aquiring locks");
+ LogWriterUtils.getLogWriter().info("Inside TxConflictRunnable.TxThread after aquiring locks");
CacheTransactionManager mgr = getGemfireCache().getTxManager();
mgr.setDistributed(true);
mgr.begin();
@@ -1965,10 +1965,10 @@ public class DistributedTransactionDUnitTest extends CacheTestCase {
mgr.commit();
} catch (CommitConflictException ce) {
gotConflict = true;
- LogWriterSupport.getLogWriter().info("Received exception ", ce);
+ LogWriterUtils.getLogWriter().info("Received exception ", ce);
} catch (Exception e) {
gotOtherException = true;
- LogWriterSupport.getLogWriter().info("Received exception ", e);
+ LogWriterUtils.getLogWriter().info("Received exception ", e);
ex.initCause(e);
}
}
@@ -2087,7 +2087,7 @@ public class DistributedTransactionDUnitTest extends CacheTestCase {
public Exception ex = new Exception();
public void run() {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("Inside TxRunnable.TxThread after aquiring locks");
CacheTransactionManager mgr = getGemfireCache().getTxManager();
mgr.setDistributed(true);
@@ -2101,7 +2101,7 @@ public class DistributedTransactionDUnitTest extends CacheTestCase {
mgr.commit();
} catch (Exception e) {
gotException = true;
- LogWriterSupport.getLogWriter().info("Received exception ", e);
+ LogWriterUtils.getLogWriter().info("Received exception ", e);
ex.initCause(e);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/ClassNotFoundExceptionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/ClassNotFoundExceptionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/ClassNotFoundExceptionDUnitTest.java
index 82c45f6..8ab0b2c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/ClassNotFoundExceptionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/ClassNotFoundExceptionDUnitTest.java
@@ -40,7 +40,7 @@ import com.gemstone.gemfire.pdx.PdxSerializable;
import com.gemstone.gemfire.pdx.PdxWriter;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -191,7 +191,7 @@ public class ClassNotFoundExceptionDUnitTest extends CacheTestCase {
public Object call() throws Exception {
disconnectFromDS();
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port);
cf.setPoolSubscriptionEnabled(true);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/JSSESocketJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/JSSESocketJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/JSSESocketJUnitTest.java
index 6d9e357..c8927b6 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/JSSESocketJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/JSSESocketJUnitTest.java
@@ -48,7 +48,7 @@ import org.junit.rules.TestName;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.util.test.TestUtil;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -122,7 +122,7 @@ public class JSSESocketJUnitTest {
oos.writeObject( expected );
oos.flush();
- Threads.join(serverThread, 30 * 1000, null);
+ ThreadUtils.join(serverThread, 30 * 1000);
client.close();
if ( expected.equals( receiver[0] ) ) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxDeleteFieldDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxDeleteFieldDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxDeleteFieldDUnitTest.java
index 54f9174..f7b6529 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxDeleteFieldDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxDeleteFieldDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.pdx.PdxWriter;
import com.gemstone.gemfire.pdx.internal.PdxType;
import com.gemstone.gemfire.pdx.internal.PdxUnreadData;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -177,7 +177,7 @@ public class PdxDeleteFieldDUnitTest extends CacheTestCase{
try {
FileUtil.delete(new File(path));
} catch (IOException e) {
- LogWriterSupport.getLogWriter().error("Unable to delete file", e);
+ LogWriterUtils.getLogWriter().error("Unable to delete file", e);
}
}
this.filesToBeDeleted.clear();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxRenameDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxRenameDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxRenameDUnitTest.java
index 7f31ae3..f10d953 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxRenameDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/PdxRenameDUnitTest.java
@@ -40,7 +40,7 @@ import com.gemstone.gemfire.pdx.internal.EnumInfo;
import com.gemstone.gemfire.pdx.internal.PdxInstanceImpl;
import com.gemstone.gemfire.pdx.internal.PdxType;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -184,7 +184,7 @@ public class PdxRenameDUnitTest extends CacheTestCase{
try {
FileUtil.delete(new File(path));
} catch (IOException e) {
- LogWriterSupport.getLogWriter().error("Unable to delete file", e);
+ LogWriterUtils.getLogWriter().error("Unable to delete file", e);
}
}
this.filesToBeDeleted.clear();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/BackupDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/BackupDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/BackupDUnitTest.java
index 0b13b95..a91fb8e 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/BackupDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/BackupDUnitTest.java
@@ -55,7 +55,7 @@ import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DUnitEnv;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -79,7 +79,7 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
StringBuilder failures = new StringBuilder();
FileUtil.delete(getBackupDir(), failures);
if (failures.length() > 0) {
- LogWriterSupport.getLogWriter().error(failures.toString());
+ LogWriterUtils.getLogWriter().error(failures.toString());
}
}
@@ -89,9 +89,9 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
long lm0 = setBackupFiles(vm0);
@@ -133,9 +133,9 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
restoreBackup(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation async0 = createPersistentRegionAsync(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
AsyncInvocation async1 = createPersistentRegionAsync(vm1);
async0.getResult(MAX_WAIT);
@@ -156,9 +156,9 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
createData(vm0, 0, 5, "A", "region1");
@@ -190,9 +190,9 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
restoreBackup(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation async0 = createPersistentRegionAsync(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
AsyncInvocation async1 = createPersistentRegionAsync(vm1);
async0.getResult(MAX_WAIT);
@@ -221,14 +221,14 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
final VM vm2 = host.getVM(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
//create a bucket on vm0
createData(vm0, 0, 1, "A", "region1");
//create the pr on vm1, which won't have any buckets
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
@@ -280,9 +280,9 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
restoreBackup(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation async0 = createPersistentRegionAsync(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
AsyncInvocation async1 = createPersistentRegionAsync(vm1);
async0.getResult(MAX_WAIT);
@@ -381,14 +381,14 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
});
try {
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
//create twos bucket on vm0
createData(vm0, 0, 2, "A", "region1");
//create the pr on vm1, which won't have any buckets
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
@@ -430,9 +430,9 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
restoreBackup(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation async0 = createPersistentRegionAsync(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
AsyncInvocation async1 = createPersistentRegionAsync(vm1);
async0.getResult(MAX_WAIT);
@@ -461,9 +461,9 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createOverflowRegion(vm1);
createData(vm0, 0, 5, "A", "region1");
@@ -486,11 +486,11 @@ public class BackupDUnitTest extends PersistentPartitionedRegionTestBase {
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
- LogWriterSupport.getLogWriter().info("Creating region in VM2");
+ LogWriterUtils.getLogWriter().info("Creating region in VM2");
createPersistentRegion(vm2);
createData(vm0, 0, 5, "A", "region1");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug33359DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug33359DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug33359DUnitTest.java
index e5b9a23..bd0d478 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug33359DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug33359DUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -68,7 +68,7 @@ public class Bug33359DUnitTest extends DistributedTestCase {
VM vm1 = host.getVM(1);
vm0.invoke(Bug33359DUnitTest.class, "createCacheVM0");
vm1.invoke(Bug33359DUnitTest.class, "createCacheVM1");
- LogWriterSupport.getLogWriter().fine("Cache created in successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created in successfully");
}
public void preTearDown(){
@@ -141,7 +141,7 @@ public class Bug33359DUnitTest extends DistributedTestCase {
for(int i=0; i<10; i++){
region.put(new Integer(i), Integer.toString(i));
}
- LogWriterSupport.getLogWriter().fine("Did all puts successfully");
+ LogWriterUtils.getLogWriter().fine("Did all puts successfully");
}
}
);
@@ -149,7 +149,7 @@ public class Bug33359DUnitTest extends DistributedTestCase {
vm0.invoke(new CacheSerializableRunnable("perform clear on region"){
public void run2() throws CacheException {
region.clear();
- LogWriterSupport.getLogWriter().fine("region is cleared");
+ LogWriterUtils.getLogWriter().fine("region is cleared");
}
}
);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37241DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37241DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37241DUnitTest.java
index dbaf110..f736199 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37241DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37241DUnitTest.java
@@ -31,7 +31,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.ReplyException;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/*
@@ -156,10 +156,10 @@ public class Bug37241DUnitTest extends DistributedTestCase
.getName());
//added for not to log exepected IllegalStateExcepion.
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedException action=add>" + expectedReplyException
+ "</ExpectedException>");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedException action=add>" + expectedException
+ "</ExpectedException>");
cache.getLogger().info(
@@ -200,10 +200,10 @@ public class Bug37241DUnitTest extends DistributedTestCase
cache.getLogger().info(
"<ExpectedException action=remove>" + expectedReplyException
+ "</ExpectedException>");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedException action=remove>" + expectedException
+ "</ExpectedException>");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedException action=remove>" + expectedReplyException
+ "</ExpectedException>");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37377DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37377DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37377DUnitTest.java
index a0398f1..766ecd4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37377DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug37377DUnitTest.java
@@ -35,9 +35,8 @@ import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -315,7 +314,7 @@ public class Bug37377DUnitTest extends CacheTestCase
vm0.invoke(putSomeEntries());
AsyncInvocation as1 = vm1.invokeAsync(createCacheForVM1());
Wait.pause(10000);
- Threads.join(as1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(as1, 30 * 1000);
vm0.invoke(closeCacheForVM(0));
vm1.invoke(closeCacheForVM(1));
vm1.invoke(createCacheForVM1());
[28/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitHelper.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitHelper.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitHelper.java
index 20b57cf..1860038 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitHelper.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitHelper.java
@@ -137,7 +137,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
localRegion = cache.createRegion(regionName, attr.create());
}
catch (IllegalStateException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRCreate: Creation caught IllegalStateException",
ex);
@@ -174,7 +174,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
localRegion = cache.createRegion(regionName, attr.create());
}
catch (IllegalStateException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRCreate: Creation caught IllegalStateException",
ex);
@@ -218,7 +218,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
localRegion = cache.createRegionFactory(RegionShortcut.REPLICATE).create(regionName);
}
catch (IllegalStateException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRCreate: Creation caught IllegalStateException",
ex);
@@ -311,7 +311,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
partitionedregion = cache.createRegion(regionName, attr.create());
}
catch (IllegalStateException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRCreateWithRedundancy: Creation caught IllegalStateException",
ex);
@@ -360,7 +360,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
partitionedregion = cache.createRegion(regionName, attr.create());
}
catch (IllegalStateException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRCreateWithRedundancy: Creation caught IllegalStateException",
ex);
@@ -415,7 +415,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
partitionedregion = cache.createRegion(regionName, attr.create());
}
catch (IllegalStateException ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.warning(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRCreateWithRedundancy: Creation caught IllegalStateException",
ex);
@@ -533,10 +533,10 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
} catch (EntryExistsException e) {
// Do nothing let it go
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("EntryExistsException was thrown for key "+ j);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("EntryExistsException was thrown for key "+ j);
} catch (EntryNotFoundException e) {
// Do nothing let it go
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("EntryNotFoundException was thrown for key "+ j);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("EntryNotFoundException was thrown for key "+ j);
}
}
}
@@ -742,7 +742,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -751,14 +751,14 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
}
catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -869,7 +869,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
@@ -885,7 +885,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -894,14 +894,14 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
}
catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -1002,7 +1002,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
}
@@ -1013,7 +1013,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -1022,14 +1022,14 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
}
catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -1142,7 +1142,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
ssORrs.CompareQueryResultsWithoutAndWithIndexes(r, queries.length,true,rq);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
@@ -1156,7 +1156,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -1165,14 +1165,14 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
}
catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -1268,7 +1268,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
// "Finished executing PR query: " + qStr);
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
@@ -1284,7 +1284,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -1293,14 +1293,14 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
}
catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -1414,7 +1414,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
r[j][1] = region.query(query[j]);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryWithConstantsAndComparingResults: Queries Executed successfully on Local region & PR Region");
@@ -1424,7 +1424,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
catch (QueryException e) {
// assertTrue("caught Exception"+ e.getMessage(),false);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryWithConstantsAndComparingResults: Caught an Exception while querying Constants"
+ e, e);
@@ -1544,7 +1544,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#compareTwoQueryResults: Type 2 is NULL "
+ type2, type2);
if ((type1.getClass().getName()).equals(type2.getClass().getName())) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#compareTwoQueryResults: Both Search Results are of the same Type i.e.--> "
+ ((SelectResults)r[j][0]).getCollectionType()
@@ -1552,7 +1552,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
else {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().error(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().error(
"PRQueryDUnitHelper#compareTwoQueryResults: Classes are : "
+ type1.getClass().getName() + " "
+ type2.getClass().getName());
@@ -1562,14 +1562,14 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
int size0 = ((SelectResults)r[j][0]).size();
int size1 = ((SelectResults)r[j][1]).size();
if (size0 == size1) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#compareTwoQueryResults: Both Search Results are non-zero and are of Same Size i.e. Size= "
+ size1 + ";j=" + j);
}
else {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#compareTwoQueryResults: FAILED:Search resultSet size are different in both cases; size0=" +
size0 + ";size1=" + size1 + ";j=" + j);
@@ -1682,7 +1682,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (QueryException qe) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRInvalidQuery: Caught another Exception while querying , Exception is "
+ qe, qe);
@@ -1723,11 +1723,11 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
+ "</ExpectedException>");
Region region = cache.getRegion(regionName);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PROperationWithQueryDUnitTest#getCacheSerializableRunnableForRegionClose: Closing region");
region.close();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PROperationWithQueryDUnitTest#getCacheSerializableRunnableForRegionClose: Region Closed on VM ");
// Region partitionedregion = null;
@@ -1741,7 +1741,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
.create();
attr.setPartitionAttributes(prAttr);
cache.createRegion(regionName, attr.create());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PROperationWithQueryDUnitTest#getCacheSerializableRunnableForRegionClose: Region Recreated on VM ");
getCache().getLogger().info(
@@ -1782,17 +1782,17 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"<ExpectedException action=add>" + expectedReplyException
+ "</ExpectedException>");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PROperationWithQueryDUnitTest#getCacheSerializableRunnableForCacheClose: Closing cache");
closeCache();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PROperationWithQueryDUnitTest#getCacheSerializableRunnableForCacheClose: cache Closed on VM ");
cache = getCache();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PROperationWithQueryDUnitTest#getCacheSerializableRunnableForCacheClose: Recreating the cache ");
// Region partitionedregion = null;
@@ -1822,7 +1822,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
} finally {
InternalResourceManager.setResourceObserver(null);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PROperationWithQueryDUnitTest#getCacheSerializableRunnableForCacheClose: cache Recreated on VM ");
getCache().getLogger().info(
@@ -1891,7 +1891,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
Region region = cache.getRegion(regionName);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#getCacheSerializableRunnableForRegionClose: Destroying region "
+ region);
@@ -1946,7 +1946,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
long endTimeLocal=System.currentTimeMillis();
long queryTimeLocal = endTimeLocal-startTimeLocal;
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("PRQueryDUnitHelper#PRQueryingVsLocalQuerying: Time to Query Local cache "+queryTimeLocal + " ms");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#PRQueryingVsLocalQuerying: Time to Query Local cache "+queryTimeLocal + " ms");
long startTimePR = System.currentTimeMillis();
for (int k = 0; k < query.length; k++) {
@@ -1956,8 +1956,8 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
long endTimePR = System.currentTimeMillis();
long queryTimePR = endTimePR-startTimePR;
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("PRQueryDUnitHelper#PRQueryingVsLocalQuerying: Time to Query PR "+queryTimePR+" ms");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper#PRQueryingVsLocalQuerying: Time to Query PR "+queryTimePR+" ms");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#PRQueryingVsLocalQuerying: Queries Executed successfully on Local region & PR Region");
@@ -1969,7 +1969,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
}
catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#PRQueryingVsLocalQuerying: Caught QueryException while querying"
+ e, e);
@@ -2004,34 +2004,34 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
public void displayResults(){
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("PRQueryDUnitHelper:PerfResultsObject#displayResults");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("PRQueryDUnitHelper:PerfResultsObject#displayResults");
BufferedWriter out = new BufferedWriter(new FileWriter("PRQueryPerfDUnitTest.txt", true));
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("~~~~~~~~~~~~~~~~~~~~~~~PR Querying Performance Results~~~~~~~~~~~~~~~~~~~~~~~");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("~~~~~~~~~~~~~~~~~~~~~~~PR Querying Performance Results~~~~~~~~~~~~~~~~~~~~~~~");
out.write("~~~~~~~~~~~~~~~~~~~~~~~PR Querying Performance Results~~~~~~~~~~~~~~~~~~~~~~~\n\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(this.OperationDescription);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(this.OperationDescription);
out.write("\t"+this.OperationDescription+"\n\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Scope : "+this.Scope);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Scope : "+this.Scope);
out.write("Scope : "+this.Scope+"\n\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Redundancy Level : "+this.redundancy);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Redundancy Level : "+this.redundancy);
out.write("Redundancy Level : "+this.redundancy+"\n\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Number of Accessor : "+this.NumberOfAccessors);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Number of Accessor : "+this.NumberOfAccessors);
out.write("Number of Accessor : "+this.NumberOfAccessors+"\n\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Number of Datastore/s : "+this.NumberOfDataStores);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Number of Datastore/s : "+this.NumberOfDataStores);
out.write("Number of Datastore/s : "+this.NumberOfDataStores+"\n\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("QueryingTime Local : "+this.QueryingTimeLocal+" ms");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("QueryingTime Local : "+this.QueryingTimeLocal+" ms");
out.write("QueryingTime Local : "+this.QueryingTimeLocal+" ms\n\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("QueryingTime PR : "+this.QueryingTimePR+" ms");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("QueryingTime PR : "+this.QueryingTimePR+" ms");
out.write("QueryingTime PR : "+this.QueryingTimePR+" ms\n");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
out.write("\n\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n");
out.close();
} catch (IOException e) {
@@ -2546,7 +2546,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
+ " r2 where " + queries[j]).execute();
r[j][1] = r2.asList();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
@@ -2562,7 +2562,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -2570,13 +2570,13 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
} catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -2675,7 +2675,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
+ " r2, r2.positions.values pos2 where " + queries[j]).execute();
r[j][1] = r2.asList();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
@@ -2691,7 +2691,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -2699,13 +2699,13 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
} catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -2805,7 +2805,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
+ " r2, r2.positions.values pos2 where " + queries[j]).execute();
r[j][1] = r2.asList();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
@@ -2821,7 +2821,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -2829,13 +2829,13 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
} catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
@@ -2935,7 +2935,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
+ " r2 where " + queries[j]).execute();
r[j][1] = r2.asList();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Queries Executed successfully on Local region & PR Region");
@@ -2951,7 +2951,7 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (QueryException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.error(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught QueryException while querying"
+ e, e);
@@ -2959,13 +2959,13 @@ public class PRQueryDUnitHelper extends PartitionedRegionDUnitTestCase
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught unexpected query exception",
e);
} catch (RegionDestroyedException rde) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a RegionDestroyedException while querying as expected ",
rde);
} catch (CancelException cce) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRQueryAndCompareResults: Caught a CancelException while querying as expected ",
cce);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitTest.java
index c391215..098a881 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryDUnitTest.java
@@ -47,7 +47,7 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.internal.cache.PartitionedRegionQueryEvaluator;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -98,7 +98,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
public void testPRDAckCreationAndQuerying() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -109,7 +109,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -120,17 +120,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -148,7 +148,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * stepSize), (3 * stepSize)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
(3 * (stepSize)), totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -156,14 +156,14 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, i, totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -182,7 +182,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
public void testPRDAckCreationAndQueryingFull() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -194,7 +194,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -205,17 +205,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -233,7 +233,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * stepSize), (3 * stepSize)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfoliosAndPositions,
(3 * (stepSize)), totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -241,14 +241,14 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfoliosAndPositions, i, totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName, true));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -268,7 +268,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
public void testPRDAckCreationAndQueryingWithConstants() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : Querying PR Test with DACK Started*****");
@@ -283,7 +283,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -294,17 +294,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : Successfully Created Local Region on VM0");
@@ -322,7 +322,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * stepSize), (3 * stepSize)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
(3 * (stepSize)), totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : Inserted Portfolio data across PR's");
@@ -330,7 +330,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, i, totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : Inserted Portfolio data over Local Region on VM0");
@@ -339,7 +339,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
.invoke(PRQHelp
.getCacheSerializableRunnableForPRQueryWithConstantsAndComparingResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQueryingWithConstants : *Querying PR's with DACK Test ENDED*****");
}
@@ -524,7 +524,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
for (Object r: th.resultsPerMember.entrySet()){
Map.Entry e = (Map.Entry)r;
Integer res = (Integer)e.getValue();
- LogWriterSupport.getLogWriter().info("PRQueryDUnitTest#testQueryResultsFromMembers : \n" +
+ LogWriterUtils.getLogWriter().info("PRQueryDUnitTest#testQueryResultsFromMembers : \n" +
"Query [" + queries[q] + "] Member : " + e.getKey() + " results size :" + res.intValue());
assertEquals("Query [" + queries[q] + "]: The results returned by the member does not match the query limit size : Member : " + e.getKey(), limit[q], res.intValue());
}
@@ -631,7 +631,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
for (Object r: th.resultsPerMember.entrySet()){
Map.Entry e = (Map.Entry)r;
Integer res = (Integer)e.getValue();
- LogWriterSupport.getLogWriter().info("PRQueryDUnitTest#testQueryResultsFromMembers : \n" +
+ LogWriterUtils.getLogWriter().info("PRQueryDUnitTest#testQueryResultsFromMembers : \n" +
"Query [" + queries[q] + "] Member : " + e.getKey() + " results size :" + res.intValue());
if (res.intValue() != 0 /* accessor member */ || res.intValue() != limit[q]) {
assertEquals("Query [" + queries[q] + "]: The results returned by the member does not match the query limit size : Member : " + e.getKey(), limit[q], res.intValue());
@@ -733,7 +733,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
public void testPRAccessorCreationAndQuerying() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Querying PR Test with DACK Started*****");
Host host = Host.getHost(0);
@@ -749,17 +749,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
// Creating PR's on the participating VM's
// Creating Accessor node on the VM
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Creating the Accessor node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
0));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully created the Accessor node in the PR");
// Creating the Datastores Nodes in the VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Creating the Datastore node in the PR");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -768,17 +768,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully Created the Datastore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully Created PR's across all VM's");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -795,7 +795,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * stepSize), (3 * stepSize)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
(3 * (stepSize)), totalDataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -806,7 +806,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Querying PR's Test ENDED*****");
}
@@ -827,7 +827,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
int dataSize = 10;
int step = 2;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -839,7 +839,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -850,17 +850,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -878,7 +878,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -886,14 +886,14 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(localName,
portfoliosAndPositions, i, dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -914,7 +914,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
int dataSize = 10;
int step = 2;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -926,7 +926,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -937,17 +937,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -965,7 +965,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -973,14 +973,14 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(localName,
portfoliosAndPositions, i, dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryAndVerifyOrder(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -1001,7 +1001,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
int step = 2;
Class valueConstraint = Portfolio.class;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Querying PR Test with DACK Started*****");
Host host = Host.getHost(0);
@@ -1017,17 +1017,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
// Creating PR's on the participating VM's
// Creating Accessor node on the VM
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Creating the Accessor node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
0, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully created the Accessor node in the PR");
// Creating the Datastores Nodes in the VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Creating the Datastore node in the PR");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -1036,17 +1036,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully Created the Datastore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully Created PR's across all VM's");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -1063,7 +1063,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfolio,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -1074,7 +1074,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQuerying : Querying PR's Test ENDED*****");
}
@@ -1084,7 +1084,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
int dataSize = 10;
int step = 2;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Querying PR Test with DACK Started*****");
@@ -1096,7 +1096,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
VM vm3 = host.getVM(3);
// Creating PR's on the participating VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating PR's on VM0, VM1 , VM2 , VM3");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -1107,17 +1107,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy, valueConstraint));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created PR's on VM0, VM1 , VM2 , VM3");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Creating Local region on VM0 to compare result Sets");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName, valueConstraint));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Successfully Created Local Region on VM0");
@@ -1135,7 +1135,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
(2 * step), (3 * step)));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(name, portfoliosAndPositions,
(3 * (step)), dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data across PR's");
@@ -1143,14 +1143,14 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPutsKeyValue(localName,
portfoliosAndPositions, i, dataSize));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : Inserted Portfolio data over Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPROrderByQueryWithLimit(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRDAckCreationAndQuerying : *Querying PR's with DACK Test ENDED*****");
}
@@ -1169,7 +1169,7 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
public void testPRAccessorCreationAndQueryingWithNoData() throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Querying PR Test with No Data Started*****");
Host host = Host.getHost(0);
@@ -1185,17 +1185,17 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
// Creating PR's on the participating VM's
// Creating Accessor node on the VM
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Creating the Accessor node in the PR");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
0));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Successfully created the Accessor node in the PR");
// Creating the Datastores Nodes in the VM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Creating the Datastore node in the PR");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -1204,24 +1204,24 @@ public class PRQueryDUnitTest extends PartitionedRegionDUnitTestCase
redundancy));
vm3.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Successfully Created the Datastore node in the PR");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Successfully Created PR's across all VM's");
// creating a local region on one of the JVM's
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Successfully Created Local Region on VM0");
// querying the VM for data
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitTest#testPRAccessorCreationAndQueryingWithNoData : Querying PR's Test No Data ENDED*****");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryPerfDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryPerfDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryPerfDUnitTest.java
index c55cabe..609c1a3 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryPerfDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryPerfDUnitTest.java
@@ -79,7 +79,7 @@ public class PRQueryPerfDUnitTest extends PartitionedRegionDUnitTestCase {
throws Exception
{
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
log.info("BenchMarking PR Querying Test Started*****");
Host host = Host.getHost(0);
@@ -165,7 +165,7 @@ public class PRQueryPerfDUnitTest extends PartitionedRegionDUnitTestCase {
public void norun_testBenchmarkingQueryingOneAccessorTwoDS_Redundancy0()
throws Exception
{
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
log.info("BenchMarking PR Querying Test Started*****");
Host host = Host.getHost(0);
@@ -249,7 +249,7 @@ public class PRQueryPerfDUnitTest extends PartitionedRegionDUnitTestCase {
public void norun_testBenchmarkingQueryingOneAccessorTwoDS_D_ACK_Redundancy1()
throws Exception
{
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
log.info("BenchMarking PR Querying Test Started*****");
Host host = Host.getHost(0);
@@ -330,7 +330,7 @@ public class PRQueryPerfDUnitTest extends PartitionedRegionDUnitTestCase {
public void norun_testBenchmarkingQueryingOneAccessorThreeDS_Redundancy1()
throws Exception
{
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
log.info("BenchMarking PR Querying Test Started*****");
Host host = Host.getHost(0);
@@ -415,7 +415,7 @@ public class PRQueryPerfDUnitTest extends PartitionedRegionDUnitTestCase {
public void norun_testBenchmarkingQueryingOneAccessorThreeDS_Redundancy2()
throws Exception
{
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
log.info("BenchMarking PR Querying Test Started*****");
Host host = Host.getHost(0);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionCloseDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionCloseDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionCloseDUnitTest.java
index 479fc5f..8641f48 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionCloseDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionCloseDUnitTest.java
@@ -35,8 +35,8 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.internal.cache.ForceReattemptException;
@@ -92,7 +92,7 @@ public class PRQueryRegionCloseDUnitTest extends PartitionedRegionDUnitTestCase
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Querying PR Test with region Close PR operation*****");
Host host = Host.getHost(0);
@@ -104,33 +104,33 @@ public class PRQueryRegionCloseDUnitTest extends PartitionedRegionDUnitTestCase
vmList.add(vm1);
vmList.add(vm2);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Creating Accessor node on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Successfully Created Accessor node on VM0");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Creating PR's across all VM1 , VM2");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
vm2.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Successfully Created PR on VM1 , VM2");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Successfully Created Local Region on VM0");
@@ -140,36 +140,36 @@ public class PRQueryRegionCloseDUnitTest extends PartitionedRegionDUnitTestCase
final PortfolioData[] portfolio = PRQHelp.createPortfolioData(cnt, cntDest);
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
Random random = new Random();
AsyncInvocation async0;
// querying the VM for data
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Querying on VM0 both on PR Region & local ,also Comparing the Results sets from both");
async0 = vm0
.invokeAsync(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
name, localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Calling for Region.close() on either of the Datastores VM1 , VM2 at random and then recreating the cache, with a predefined Delay ");
for (int j = 0; j < queryTestCycle; j++) {
@@ -180,7 +180,7 @@ public class PRQueryRegionCloseDUnitTest extends PartitionedRegionDUnitTestCase
Wait.pause(threadSleepTime);
}
}
- Threads.join(async0, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 30 * 1000);
if (async0.exceptionOccurred()) {
// for now, certain exceptions when a region is closed are acceptable
@@ -200,7 +200,7 @@ public class PRQueryRegionCloseDUnitTest extends PartitionedRegionDUnitTestCase
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionCloseDUnitTest#testPRWithRegionCloseInOneDatastoreWithoutDelay: Querying with PR Operations ENDED*****");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedDUnitTest.java
index 9e7678c..c3a37dd 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedDUnitTest.java
@@ -35,8 +35,8 @@ import com.gemstone.gemfire.internal.cache.PartitionedRegionDUnitTestCase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.internal.cache.ForceReattemptException;
@@ -91,7 +91,7 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
throws Exception
{
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying with PR Destroy Region Operation Test Started");
Host host = Host.getHost(0);
@@ -105,16 +105,16 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
vmList.add(vm2);
vmList.add(vm3);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Creating Accessor node on VM0");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRAccessorCreate(name,
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Created Accessor node on VM0");
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Creating PR's across all VM1 , VM2, VM3");
vm1.invoke(PRQHelp.getCacheSerializableRunnableForPRCreate(name,
@@ -125,17 +125,17 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
redundancy));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Created PR on VM1 , VM2, VM3");
// creating a local region on one of the JVM's
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Creating Local Region on VM0");
vm0.invoke(PRQHelp
.getCacheSerializableRunnableForLocalRegionCreation(localName));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Created Local Region on VM0");
@@ -146,22 +146,22 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
// Putting the data into the accessor node
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Inserting Portfolio data through the accessor node");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(name, portfolio,
cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Inserted Portfolio data through the accessor node");
// Putting the same data in the local region created
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Inserting Portfolio data on local node VM0 for result Set Comparison");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRPuts(localName,
portfolio, cnt, cntDest));
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Successfully Inserted Portfolio data on local node VM0 for result Set Comparison");
@@ -170,7 +170,7 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
// Execute query first time. This is to make sure all the buckets are created
// (lazy bucket creation).
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying on VM0 First time");
vm0.invoke(PRQHelp.getCacheSerializableRunnableForPRQueryAndCompareResults(
@@ -178,7 +178,7 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
// Now execute the query. And while query execution in process destroy the region
// on one of the node.
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying on VM0 both on PR Region & local ,also Comparing the Results sets from both");
async0 = vm0
@@ -186,7 +186,7 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
name, localName));
Wait.pause(5);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Calling for Region.destroyRegion() on either of the Datastores VM1 , VM2 at random and then recreating the cache, with a predefined Delay ");
@@ -196,7 +196,7 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
name, redundancy));
- Threads.join(async0, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async0, 30 * 1000);
if (async0.exceptionOccurred()) {
// for Elbe, certain exceptions when a region is destroyed are acceptable
@@ -215,7 +215,7 @@ public class PRQueryRegionDestroyedDUnitTest extends PartitionedRegionDUnitTestC
Assert.fail("Unexpected exception during query", async0.getException());
}
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryRegionDestroyedDUnitTest#testPRWithRegionDestroyInOneDatastoreWithDelay: Querying with PR Destroy Region Operation Test ENDED");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedJUnitTest.java
index 1141b34..314d4a4 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/partitioned/PRQueryRegionDestroyedJUnitTest.java
@@ -34,7 +34,7 @@ import com.gemstone.gemfire.cache.query.RegionNotFoundException;
import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.data.PortfolioData;
import com.gemstone.gemfire.internal.cache.PartitionedRegionTestHelper;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -216,8 +216,8 @@ public class PRQueryRegionDestroyedJUnitTest
logger
.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: Waiting for the Threads to join ");
- Threads.join(t1, 30 * 1000, null);
- Threads.join(t2, 30 * 1000, null);
+ ThreadUtils.join(t1, 30 * 1000);
+ ThreadUtils.join(t2, 30 * 1000);
logger
.info("PRQueryRegionDestroyedJUnitTest#testQueryOnSingleDataStore: checking for any Unexpected Exception's occured");
[03/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaToRegionRelationCQRegistrationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaToRegionRelationCQRegistrationDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaToRegionRelationCQRegistrationDUnitTest.java
index b8a271b..4a7a17e 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaToRegionRelationCQRegistrationDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaToRegionRelationCQRegistrationDUnitTest.java
@@ -44,8 +44,8 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -334,7 +334,7 @@ public class DeltaToRegionRelationCQRegistrationDUnitTest extends DistributedTes
else
cq1.execute();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to create CQ " + cqName1 + " . ");
err.initCause(ex);
@@ -375,7 +375,7 @@ public class DeltaToRegionRelationCQRegistrationDUnitTest extends DistributedTes
else
cq1.execute();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to create CQ " + cqName1 + " . ");
err.initCause(ex);
@@ -670,7 +670,7 @@ public class DeltaToRegionRelationCQRegistrationDUnitTest extends DistributedTes
"createServerCache")).intValue();
client
.invoke(DeltaToRegionRelationCQRegistrationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()),
new Integer(PORT1) });
}
@@ -682,7 +682,7 @@ public class DeltaToRegionRelationCQRegistrationDUnitTest extends DistributedTes
"createServerCache")).intValue();
client
.invoke(DeltaToRegionRelationCQRegistrationDUnitTest.class, "createClientCacheWithNoRegion",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()),
new Integer(PORT1) });
}
/*
@@ -705,8 +705,8 @@ public class DeltaToRegionRelationCQRegistrationDUnitTest extends DistributedTes
"createServerCache")).intValue();
primary = (Integer)client2.invoke(
DeltaToRegionRelationCQRegistrationDUnitTest.class, "createClientCache2",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()),
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()),
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT1),
new Integer(PORT2) });
}
@@ -721,8 +721,8 @@ public class DeltaToRegionRelationCQRegistrationDUnitTest extends DistributedTes
"createServerCache")).intValue();
primary = (Integer)client2.invoke(
DeltaToRegionRelationCQRegistrationDUnitTest.class, "createClientCache3",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()),
- NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()),
+ NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT1),
new Integer(PORT2) });
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientSimpleDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientSimpleDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientSimpleDUnitTest.java
index bd351c5..ed0e565 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientSimpleDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientSimpleDUnitTest.java
@@ -46,8 +46,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -70,7 +70,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// normally
final String durableClientId = getName() + "_client";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -93,7 +93,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some entries
final int numberOfEntries = 10;
@@ -152,7 +152,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
final String regionName1 = regionName + "1";
final String regionName2 = regionName + "2";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClients",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName1, regionName2, getClientDistributedSystemProperties(durableClientId)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName1, regionName2, getClientDistributedSystemProperties(durableClientId)});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -218,7 +218,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// stops normally
final String durableClientId = getName() + "_client";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -244,7 +244,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
public void run2() throws CacheException {
getSystem(getClientDistributedSystemProperties(durableClientId));
PoolFactoryImpl pf = (PoolFactoryImpl)PoolManager.createFactory();
- pf.init(getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, true));
+ pf.init(getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, true));
try {
pf.create("uncreatablePool");
fail("Should not have been able to create the pool");
@@ -274,7 +274,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some entries
final int numberOfEntries = 10;
@@ -330,7 +330,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// stops normally
final String durableClientId = getName() + "_client";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId)});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -344,7 +344,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
VM durableClient2VM = this.publisherClientVM;
final String durableClientId2 = getName() + "_client2";
durableClient2VM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClient2VM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId2)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClient2VM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId2)});
// Send clientReady message
durableClient2VM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -412,7 +412,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -450,7 +450,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
// Publish some entries
final int numberOfEntries = 10;
@@ -519,7 +519,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Re-start the durable client
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -582,7 +582,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -608,7 +608,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
VM durableClient2VM = this.server2VM;
final String durableClientId2 = getName() + "_client2";
durableClient2VM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClient2VM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId2, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClient2VM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId2, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
durableClient2VM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -657,7 +657,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some entries
final int numberOfEntries = 10;
@@ -783,7 +783,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Re-start durable client 1
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -794,7 +794,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Re-start durable client 2
durableClient2VM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClient2VM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId2), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClient2VM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId2), Boolean.TRUE});
// Send clientReady message
durableClient2VM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -874,7 +874,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -934,7 +934,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
// Publish some entries
final int numberOfEntries = 10;
@@ -974,7 +974,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Re-start the durable client that is kept alive on the server when it stops
// normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -1061,7 +1061,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
final String durableClientId = getName() + "_client";
// make the client use ClientCacheFactory so it will have a default pool
this.durableClientVM.invoke(CacheServerTestUtil.class, "createClientCache",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId)});
// verify that readyForEvents has not yet been called on the client's default pool
this.durableClientVM.invoke(new CacheSerializableRunnable("check readyForEvents not called") {
@@ -1192,7 +1192,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some entries
final int numberOfEntries = 10;
@@ -1358,7 +1358,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
ClientServerObserver origObserver = ClientServerObserverHolder.setInstance(new ClientServerObserverAdapter() {
public void beforeSendingClientAck()
{
- LogWriterSupport.getLogWriter().info("beforeSendingClientAck invoked");
+ LogWriterUtils.getLogWriter().info("beforeSendingClientAck invoked");
}
});
@@ -1435,7 +1435,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some entries
final int numberOfEntries = 10;
@@ -3286,7 +3286,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
final String durableClientId = getName() + "_client";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -3351,7 +3351,7 @@ public class DurableClientSimpleDUnitTest extends DurableClientTestCase {
// Start up the client again. This time initialize it so that it is not kept
// alive on the servers when it stops normally.
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), server1Port, server2Port, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientTestCase.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientTestCase.java
index 26650bf..c0aba3c 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientTestCase.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableClientTestCase.java
@@ -65,7 +65,7 @@ import com.gemstone.gemfire.internal.cache.ha.HARegionQueue;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -126,7 +126,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// stops normally
final String durableClientId = getName() + "_client";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId)});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -182,7 +182,7 @@ public class DurableClientTestCase extends DistributedTestCase {
final String durableClientId = getName() + "_client";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] { getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), new Boolean(false), jp });
+ new Object[] { getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), new Boolean(false), jp });
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -315,7 +315,7 @@ public class DurableClientTestCase extends DistributedTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout)});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -354,7 +354,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Re-start the durable client
this.restartDurableClient(new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()),serverPort, true),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()),serverPort, true),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout) });
@@ -398,7 +398,7 @@ public class DurableClientTestCase extends DistributedTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout)});
// // Send clientReady message
// this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -447,7 +447,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Re-start the durable client (this is necessary so the
//netDown test will set the appropriate system properties.
this.restartDurableClient(new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout) });
@@ -487,7 +487,7 @@ public class DurableClientTestCase extends DistributedTestCase {
final int durableClientTimeout = 5; // keep the client alive for 5 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout)});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout)});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -543,7 +543,7 @@ public class DurableClientTestCase extends DistributedTestCase {
});
this.restartDurableClient(new Object[] {
- getClientPool(NetworkSupport.getServerHostName(Host.getHost(0)), serverPort, true),
+ getClientPool(NetworkUtils.getServerHostName(Host.getHost(0)), serverPort, true),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout) });
@@ -570,7 +570,7 @@ public class DurableClientTestCase extends DistributedTestCase {
final int durableClientTimeout = 120; // keep the client alive for 60 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -593,7 +593,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some entries
final int numberOfEntries = 1;
@@ -696,7 +696,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Re-start the durable client
this.restartDurableClient(new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName,
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName,
getClientDistributedSystemProperties(durableClientId), Boolean.TRUE });
// Verify durable client on server
@@ -741,7 +741,7 @@ public class DurableClientTestCase extends DistributedTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
//final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -764,7 +764,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some entries
final int numberOfEntries = 1;
@@ -852,7 +852,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Re-start the durable client
this.restartDurableClient(new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName,
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName,
getClientDistributedSystemProperties(durableClientId), Boolean.TRUE });
// Verify durable client on server
@@ -956,7 +956,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// stops normally
final String durableClientId = getName() + "_client";
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), serverPort, true), regionName, getClientDistributedSystemProperties(durableClientId), Boolean.TRUE});
// Send clientReady message
this.durableClientVM.invoke(new CacheSerializableRunnable("Send clientReady") {
@@ -1022,7 +1022,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Start a publisher
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), serverPort, false), regionName});
// Publish some messages
// Publish some entries
@@ -1107,10 +1107,10 @@ public class DurableClientTestCase extends DistributedTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
Pool clientPool;
if (redundancyLevel == 1) {
- clientPool = getClientPool(NetworkSupport.getServerHostName(Host.getHost(0)), server1Port, server2Port, true);
+ clientPool = getClientPool(NetworkUtils.getServerHostName(Host.getHost(0)), server1Port, server2Port, true);
}
else {
- clientPool = getClientPool(NetworkSupport.getServerHostName(Host.getHost(0)), server1Port, server2Port, true, 0);
+ clientPool = getClientPool(NetworkUtils.getServerHostName(Host.getHost(0)), server1Port, server2Port, true, 0);
}
this.durableClientVM.invoke(CacheServerTestUtil.class, "disableShufflingOfEndpoints");
@@ -1144,7 +1144,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
// Publish some entries
final int numberOfEntries = 1;
@@ -1301,10 +1301,10 @@ public class DurableClientTestCase extends DistributedTestCase {
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
Pool clientPool;
if (redundancyLevel == 1) {
- clientPool = getClientPool(NetworkSupport.getServerHostName(Host.getHost(0)), server1Port, server2Port, true);
+ clientPool = getClientPool(NetworkUtils.getServerHostName(Host.getHost(0)), server1Port, server2Port, true);
}
else {
- clientPool = getClientPool(NetworkSupport.getServerHostName(Host.getHost(0)), server1Port, server2Port, true, 0);
+ clientPool = getClientPool(NetworkUtils.getServerHostName(Host.getHost(0)), server1Port, server2Port, true, 0);
}
this.durableClientVM.invoke(CacheServerTestUtil.class, "disableShufflingOfEndpoints");
@@ -1333,7 +1333,7 @@ public class DurableClientTestCase extends DistributedTestCase {
// Start normal publisher client
this.publisherClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] {getClientPool(NetworkSupport.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
+ new Object[] {getClientPool(NetworkUtils.getServerHostName(publisherClientVM.getHost()), server1Port, server2Port, false), regionName});
// Publish some entries
final int numberOfEntries = 1;
@@ -2010,7 +2010,7 @@ public class DurableClientTestCase extends DistributedTestCase {
CacheServerTestUtil.class,
"createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()),
serverPort1, true), regionName,
getClientDistributedSystemProperties(durableClientId, durableTimeoutInSeconds),
Boolean.TRUE });
@@ -2021,7 +2021,7 @@ public class DurableClientTestCase extends DistributedTestCase {
CacheServerTestUtil.class,
"createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()),
serverPort1, true), regionName,
getClientDistributedSystemProperties(durableClientId),
Boolean.TRUE });
@@ -2032,7 +2032,7 @@ public class DurableClientTestCase extends DistributedTestCase {
CacheServerTestUtil.class,
"createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(vm.getHost()),
+ getClientPool(NetworkUtils.getServerHostName(vm.getHost()),
serverPort1, serverPort2, true), regionName,
getClientDistributedSystemProperties(durableClientId),
Boolean.TRUE });
@@ -2043,7 +2043,7 @@ public class DurableClientTestCase extends DistributedTestCase {
CacheServerTestUtil.class,
"createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(vm.getHost()),
+ getClientPool(NetworkUtils.getServerHostName(vm.getHost()),
serverPort1, false), regionName });
}
@@ -2052,7 +2052,7 @@ public class DurableClientTestCase extends DistributedTestCase {
CacheServerTestUtil.class,
"createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(vm.getHost()),
+ getClientPool(NetworkUtils.getServerHostName(vm.getHost()),
serverPort1, serverPort2, false), regionName });
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/management/CacheServerManagementDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/CacheServerManagementDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/CacheServerManagementDUnitTest.java
index a5b3dd8..a66da0b 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/CacheServerManagementDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/CacheServerManagementDUnitTest.java
@@ -51,8 +51,8 @@ import com.gemstone.gemfire.management.internal.MBeanJMXAdapter;
import com.gemstone.gemfire.management.internal.SystemManagementService;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -122,7 +122,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
final int port = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
cqDUnitTest.createClient(client, port, host0);
@@ -178,7 +178,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
startLocatorInVM(locator, locatorPort, "");
- String locators = NetworkSupport.getServerHostName(locator.getHost())+ "[" + locatorPort + "]";
+ String locators = NetworkUtils.getServerHostName(locator.getHost())+ "[" + locatorPort + "]";
int serverPort = startBridgeServerInVM(server, null, locators);
@@ -186,7 +186,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
addClientNotifListener(server,serverPort);
// Start a client and make sure that proper notification is received
- startBridgeClientInVM(client, null, NetworkSupport.getServerHostName(locator.getHost()), locatorPort);
+ startBridgeClientInVM(client, null, NetworkUtils.getServerHostName(locator.getHost()), locatorPort);
//stop the client and make sure the bridge server notifies
stopBridgeMemberVM(client);
@@ -215,7 +215,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
final int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
startLocator(locator, locatorPort, "");
- String locators = NetworkSupport.getServerHostName(locator.getHost())+ "[" + locatorPort + "]";
+ String locators = NetworkUtils.getServerHostName(locator.getHost())+ "[" + locatorPort + "]";
//Step 2:
int serverPort = startBridgeServerInVM(server, null, locators);
@@ -279,7 +279,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, String.valueOf(0));
props.setProperty(DistributionConfig.LOCATORS_NAME, otherLocators);
- props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.setProperty(DistributionConfig.JMX_MANAGER_HTTP_PORT_NAME, "0");
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
try {
@@ -287,7 +287,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
+ ".log");
InetAddress bindAddr = null;
try {
- bindAddr = InetAddress.getByName(NetworkSupport.getServerHostName(vm.getHost()));
+ bindAddr = InetAddress.getByName(NetworkUtils.getServerHostName(vm.getHost()));
} catch (UnknownHostException uhe) {
Assert.fail("While resolving bind address ", uhe);
}
@@ -400,7 +400,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
CacheServerMXBean bean = service
.getLocalCacheServerMXBean(serverPort);
assertEquals(bean.getIndexCount(), 1);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Index is " + bean.getIndexList()[0]
+ "</ExpectedString> ");
try {
@@ -491,7 +491,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
.getMessageTimeToLive());
assertEquals(CacheServer.DEFAULT_LOAD_POLL_INTERVAL, bean
.getLoadPollInterval());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> LoadProbe of the Server is "
+ bean.fetchLoadProbe().toString() + "</ExpectedString> ");
}
@@ -518,14 +518,14 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
String clientId = bean.getClientIds()[0];
assertNotNull(clientId);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> ClientId of the Server is " + clientId
+ "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Active Query Count "
+ bean.getActiveCQCount() + "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Registered Query Count "
+ bean.getRegisteredCQCount() + "</ExpectedString> ");
@@ -567,7 +567,7 @@ public class CacheServerManagementDUnitTest extends LocatorTestBase {
@Override
public void handleNotification(Notification notification, Object handback) {
assertNotNull(notification);
- LogWriterSupport.getLogWriter().info("Expected String :" + notification.toString());
+ LogWriterUtils.getLogWriter().info("Expected String :" + notification.toString());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ClientCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ClientCommandsDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ClientCommandsDUnitTest.java
index b4cb4f1..c77ab2b 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ClientCommandsDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ClientCommandsDUnitTest.java
@@ -63,9 +63,9 @@ import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.result.CompositeResultData;
import com.gemstone.gemfire.management.internal.cli.result.CompositeResultData.SectionResultData;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -984,7 +984,7 @@ public void verifyClientStats(CommandResult commandResultForClient, String serve
getSystem(props);
final ClientCacheFactory ccf = new ClientCacheFactory(props);
- ccf.addPoolServer(NetworkSupport.getServerHostName(server.getHost()), port);
+ ccf.addPoolServer(NetworkUtils.getServerHostName(server.getHost()), port);
ccf.setPoolSubscriptionEnabled(true);
ccf.setPoolPingInterval(1);
ccf.setPoolStatisticInterval(1);
@@ -1004,7 +1004,7 @@ public void verifyClientStats(CommandResult commandResultForClient, String serve
}else{
String poolName = "new_pool_" + System.currentTimeMillis();
try{
- PoolImpl p = (PoolImpl) PoolManager.createFactory().addServer(NetworkSupport.getServerHostName(server.getHost()), port)
+ PoolImpl p = (PoolImpl) PoolManager.createFactory().addServer(NetworkUtils.getServerHostName(server.getHost()), port)
.setThreadLocalConnections(true)
.setMinConnections(1)
.setSubscriptionEnabled(true)
@@ -1057,7 +1057,7 @@ public void verifyClientStats(CommandResult commandResultForClient, String serve
protected Properties getServerProperties() {
Properties p = new Properties();
- p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
return p;
}
@@ -1397,7 +1397,7 @@ private void setUpNonSubscribedClient() throws Exception {
getSystem(props);
final ClientCacheFactory ccf = new ClientCacheFactory(props);
- ccf.addPoolServer(NetworkSupport.getServerHostName(server.getHost()), port);
+ ccf.addPoolServer(NetworkUtils.getServerHostName(server.getHost()), port);
ccf.setPoolSubscriptionEnabled(false);
ccf.setPoolPingInterval(1);
ccf.setPoolStatisticInterval(1);
@@ -1417,7 +1417,7 @@ private void setUpNonSubscribedClient() throws Exception {
}else{
String poolName = "new_pool_" + System.currentTimeMillis();
try{
- PoolImpl p = (PoolImpl) PoolManager.createFactory().addServer(NetworkSupport.getServerHostName(server.getHost()), port)
+ PoolImpl p = (PoolImpl) PoolManager.createFactory().addServer(NetworkUtils.getServerHostName(server.getHost()), port)
.setThreadLocalConnections(true)
.setMinConnections(1)
.setSubscriptionEnabled(false)
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DurableClientCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DurableClientCommandsDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DurableClientCommandsDUnitTest.java
index 3bce205..d8e65d9 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DurableClientCommandsDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/DurableClientCommandsDUnitTest.java
@@ -44,10 +44,10 @@ import com.gemstone.gemfire.management.cli.Result.Status;
import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -250,7 +250,7 @@ public class DurableClientCommandsDUnitTest extends CliCommandTestBase {
}
private void writeToLog(String text, String resultAsString) {
- LogWriterSupport.getLogWriter().info(getUniqueName() + ": " + text + "\n" + resultAsString);
+ LogWriterUtils.getLogWriter().info(getUniqueName() + ": " + text + "\n" + resultAsString);
}
private void setupSystem() throws Exception {
@@ -360,7 +360,7 @@ public class DurableClientCommandsDUnitTest extends CliCommandTestBase {
getSystem(props);
final ClientCacheFactory ccf = new ClientCacheFactory(props);
- ccf.addPoolServer(NetworkSupport.getServerHostName(server.getHost()), port);
+ ccf.addPoolServer(NetworkUtils.getServerHostName(server.getHost()), port);
ccf.setPoolSubscriptionEnabled(true);
ClientCache cache = (ClientCache)getClientCache(ccf);
@@ -423,7 +423,7 @@ public class DurableClientCommandsDUnitTest extends CliCommandTestBase {
protected Properties getServerProperties() {
Properties p = new Properties();
- p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ p.setProperty(DistributionConfig.LOCATORS_NAME, "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
return p;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestCQDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestCQDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestCQDUnitTest.java
index 75a41b6..03f19c3 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestCQDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestCQDUnitTest.java
@@ -21,8 +21,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.management.DistributedSystemMXBean;
import com.gemstone.gemfire.management.ManagementService;
import com.gemstone.gemfire.management.ManagementTestBase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -83,12 +83,12 @@ public class TestCQDUnitTest extends ManagementTestBase {
public void testNumOfCQ() throws Exception {
initManagement(false);
- LogWriterSupport.getLogWriter().info("started testNumOfCQ");
+ LogWriterUtils.getLogWriter().info("started testNumOfCQ");
VM server = managedNodeList.get(1);
VM client = managedNodeList.get(2);
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
int serverPort = AvailablePortHelper.getRandomAvailableTCPPort();
cqDUnitTest.createServer(server, serverPort);
@@ -132,7 +132,7 @@ public class TestCQDUnitTest extends ManagementTestBase {
long numOfCQ = ((Number) managingNode.invoke(TestCQDUnitTest.class,
"getNumOfCQ")).intValue();
- LogWriterSupport.getLogWriter().info("testNumOfCQ numOfCQ= " + numOfCQ);
+ LogWriterUtils.getLogWriter().info("testNumOfCQ numOfCQ= " + numOfCQ);
cqDUnitTest.closeClient(client);
cqDUnitTest.closeServer(server);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientsDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientsDUnitTest.java
index 0e23900..9cfec67 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientsDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientsDUnitTest.java
@@ -21,8 +21,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.management.DistributedSystemMXBean;
import com.gemstone.gemfire.management.ManagementService;
import com.gemstone.gemfire.management.ManagementTestBase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -91,11 +91,11 @@ public class TestClientsDUnitTest extends ManagementTestBase {
cqDUnitTest.createServer(server, serverPort);
final int port = server.invokeInt(CqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
cqDUnitTest.createClient(client, port, host0);
Integer numOfClients = (Integer) managingNode.invoke(
TestClientsDUnitTest.class, "getNumOfClients");
- LogWriterSupport.getLogWriter().info("testNumOfClients numOfClients = " + numOfClients);
+ LogWriterUtils.getLogWriter().info("testNumOfClients numOfClients = " + numOfClients);
cqDUnitTest.closeClient(client);
cqDUnitTest.closeServer(server);
assertEquals(1, numOfClients.intValue());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestServerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestServerDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestServerDUnitTest.java
index fce6d91..9d880a7 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestServerDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestServerDUnitTest.java
@@ -21,7 +21,7 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.management.DistributedSystemMXBean;
import com.gemstone.gemfire.management.ManagementService;
import com.gemstone.gemfire.management.ManagementTestBase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -86,7 +86,7 @@ public class TestServerDUnitTest extends ManagementTestBase {
cqDUnitTest.createServer(server, serverPort);
int serverCount = ((Number) managingNode.invoke(TestServerDUnitTest.class,
"getNumOfServersFromMBean")).intValue();
- LogWriterSupport.getLogWriter().info("TestServerDUnitTest serverCount =" + serverCount);
+ LogWriterUtils.getLogWriter().info("TestServerDUnitTest serverCount =" + serverCount);
cqDUnitTest.closeServer(server);
assertEquals(1, serverCount);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientAuthzObjectModDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientAuthzObjectModDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientAuthzObjectModDUnitTest.java
index b34b4a2..020c37b 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientAuthzObjectModDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientAuthzObjectModDUnitTest.java
@@ -34,7 +34,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.security.ObjectWithAuthz;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
/**
@@ -339,15 +339,15 @@ public class ClientAuthzObjectModDUnitTest extends ClientAuthorizationTestBase {
String authInit = gen.getAuthInit();
String authenticator = gen.getAuthenticator();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsObjectModWithFailover: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsObjectModWithFailover: Using authenticator: "
+ authenticator);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsObjectModWithFailover: Using pre-operation accessor: "
+ preAccessor);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsObjectModWithFailover: Using post-operation accessor: "
+ postAccessor);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientCQPostAuthorizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientCQPostAuthorizationDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientCQPostAuthorizationDUnitTest.java
index 510b134..bf6eec4 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientCQPostAuthorizationDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientCQPostAuthorizationDUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -304,7 +304,7 @@ public class ClientCQPostAuthorizationDUnitTest extends
+ SecurityTestUtil.proxyCaches[i].getRegion(regionName).getFullPath();
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -318,7 +318,7 @@ public class ClientCQPostAuthorizationDUnitTest extends
AssertionError err = new AssertionError("Failed to create CQ " + cqName
+ " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
throw err;
}
}
@@ -346,16 +346,16 @@ public class ClientCQPostAuthorizationDUnitTest extends
try {
cq1 = cqService.getCq(cqName);
if (cq1 == null) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Failed to get CqQuery object for CQ name: " + cqName);
fail("Failed to get CQ " + cqName);
} else {
- LogWriterSupport.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
+ LogWriterUtils.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
}
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
- LogWriterSupport.getLogWriter().error(ex);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().error(ex);
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
err.initCause(ex);
@@ -369,9 +369,9 @@ public class ClientCQPostAuthorizationDUnitTest extends
cqResults = cq1.executeWithInitialResults();
} catch (CqException ce) {
if (ce.getCause() instanceof NotAuthorizedException && !postAuthzAllowed[i]) {
- LogWriterSupport.getLogWriter().info("Got expected exception for CQ " + cqName);
+ LogWriterUtils.getLogWriter().info("Got expected exception for CQ " + cqName);
} else {
- LogWriterSupport.getLogWriter().info("CqService is: " + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is: " + cqService);
ce.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
@@ -379,14 +379,14 @@ public class ClientCQPostAuthorizationDUnitTest extends
throw err;
}
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("CqService is: " + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is: " + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
err.initCause(ex);
throw err;
}
- LogWriterSupport.getLogWriter().info("initial result size = " + cqResults.size());
+ LogWriterUtils.getLogWriter().info("initial result size = " + cqResults.size());
assertTrue("executeWithInitialResults() state mismatch", cq1
.getState().isRunning());
if (expectedResultsSize >= 0) {
@@ -398,9 +398,9 @@ public class ClientCQPostAuthorizationDUnitTest extends
cq1.execute();
} catch (CqException ce) {
if (ce.getCause() instanceof NotAuthorizedException && !postAuthzAllowed[i]) {
- LogWriterSupport.getLogWriter().info("Got expected exception for CQ " + cqName);
+ LogWriterUtils.getLogWriter().info("Got expected exception for CQ " + cqName);
} else {
- LogWriterSupport.getLogWriter().info("CqService is: " + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is: " + cqService);
ce.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
@@ -412,7 +412,7 @@ public class ClientCQPostAuthorizationDUnitTest extends
+ cqName);
err.initCause(ex);
if (expectedErr == null) {
- LogWriterSupport.getLogWriter().info("CqService is: " + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is: " + cqService, err);
}
throw err;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientPostAuthorizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientPostAuthorizationDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientPostAuthorizationDUnitTest.java
index b304885..3cb3443 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientPostAuthorizationDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/ClientPostAuthorizationDUnitTest.java
@@ -27,7 +27,7 @@ import security.CredentialGenerator;
import com.gemstone.gemfire.cache.operations.OperationContext.OperationCode;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
/**
* Tests for authorization from client to server. This tests for authorization
@@ -127,10 +127,10 @@ public class ClientPostAuthorizationDUnitTest extends
String accessor = gen.getAuthorizationCallback();
TestAuthzCredentialGenerator tgen = new TestAuthzCredentialGenerator(gen);
- LogWriterSupport.getLogWriter().info("testAllPostOps: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testAllPostOps: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testAllPostOps: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testAllPostOps: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testAllPostOps: Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, true,
@@ -316,7 +316,7 @@ public class ClientPostAuthorizationDUnitTest extends
OperationWithAction.OPBLOCK_NO_FAILOVER };
AuthzCredentialGenerator gen = getXmlAuthzGenerator();
- LogWriterSupport.getLogWriter().info("Executing opblocks with credential generator " + gen);
+ LogWriterUtils.getLogWriter().info("Executing opblocks with credential generator " + gen);
CredentialGenerator cGen = gen.getCredentialGenerator();
Properties extraAuthProps = cGen.getSystemProperties();
Properties javaProps = cGen.getJavaProperties();
@@ -326,11 +326,11 @@ public class ClientPostAuthorizationDUnitTest extends
String accessor = gen.getAuthorizationCallback();
TestAuthzCredentialGenerator tgen = new TestAuthzCredentialGenerator(gen);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllOpsNotifications: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllOpsNotifications: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllOpsNotifications: Using accessor: " + accessor);
// Start servers with all required properties
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserAPIDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserAPIDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserAPIDUnitTest.java
index b13da33..ec85a1e 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserAPIDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserAPIDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.PoolManagerImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import security.DummyCredentialGenerator;
@@ -138,11 +138,11 @@ public class MultiuserAPIDUnitTest extends ClientAuthorizationTestBase {
String authenticator = gen.getAuthenticator();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testValidCredentials: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info("testValidCredentials: Using authinit: " + authInit);
// Start the servers
Integer locPort1 = SecurityTestUtil.getLocatorPort();
@@ -158,12 +158,12 @@ public class MultiuserAPIDUnitTest extends ClientAuthorizationTestBase {
// Start the clients with valid credentials
Properties credentials1 = gen.getValidCredentials(1);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: For first client credentials: " + credentials1
+ " : " + javaProps1);
Properties credentials2 = gen.getValidCredentials(2);
Properties javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: For second client credentials: " + credentials2
+ " : " + javaProps2);
client1.invoke(MultiuserAPIDUnitTest.class, "createCacheClient",
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserDurableCQAuthzDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserDurableCQAuthzDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserDurableCQAuthzDUnitTest.java
index 082d352..23f496d 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserDurableCQAuthzDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/security/MultiuserDurableCQAuthzDUnitTest.java
@@ -40,7 +40,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.logging.InternalLogWriter;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
/**
@@ -320,7 +320,7 @@ public class MultiuserDurableCQAuthzDUnitTest extends
+ SecurityTestUtil.proxyCaches[i].getRegion(regionName).getFullPath();
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -334,7 +334,7 @@ public class MultiuserDurableCQAuthzDUnitTest extends
AssertionError err = new AssertionError("Failed to create CQ " + cqName
+ " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
throw err;
}
}
@@ -362,16 +362,16 @@ public class MultiuserDurableCQAuthzDUnitTest extends
try {
cq1 = cqService.getCq(cqName);
if (cq1 == null) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Failed to get CqQuery object for CQ name: " + cqName);
fail("Failed to get CQ " + cqName);
} else {
- LogWriterSupport.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
+ LogWriterUtils.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
}
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
- LogWriterSupport.getLogWriter().error(ex);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().error(ex);
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
err.initCause(ex);
@@ -384,14 +384,14 @@ public class MultiuserDurableCQAuthzDUnitTest extends
try {
cqResults = cq1.executeWithInitialResults();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("CqService is: " + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is: " + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
err.initCause(ex);
throw err;
}
- LogWriterSupport.getLogWriter().info("initial result size = " + cqResults.size());
+ LogWriterUtils.getLogWriter().info("initial result size = " + cqResults.size());
assertTrue("executeWithInitialResults() state mismatch", cq1
.getState().isRunning());
if (expectedResultsSize >= 0) {
@@ -406,7 +406,7 @@ public class MultiuserDurableCQAuthzDUnitTest extends
+ cqName);
err.initCause(ex);
if (expectedErr == null) {
- LogWriterSupport.getLogWriter().info("CqService is: " + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is: " + cqService, err);
}
throw err;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/UpdateVersionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/UpdateVersionDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/UpdateVersionDUnitTest.java
index 5b2dda1..8c329ef 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/UpdateVersionDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/UpdateVersionDUnitTest.java
@@ -56,7 +56,7 @@ import com.gemstone.gemfire.internal.cache.wan.InternalGatewaySenderFactory;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -189,7 +189,7 @@ public class UpdateVersionDUnitTest extends DistributedTestCase {
throw new RuntimeException("unexpected exception", e);
}
if (entry != null) {
- LogWriterSupport.getLogWriter().info("found entry " + entry);
+ LogWriterUtils.getLogWriter().info("found entry " + entry);
}
return (entry != null);
}
@@ -453,7 +453,7 @@ public class UpdateVersionDUnitTest extends DistributedTestCase {
throw new RuntimeException("unexpected exception", e);
}
if (entry != null) {
- LogWriterSupport.getLogWriter().info("found entry " + entry);
+ LogWriterUtils.getLogWriter().info("found entry " + entry);
}
return (entry != null);
}
@@ -592,7 +592,7 @@ public class UpdateVersionDUnitTest extends DistributedTestCase {
throw new RuntimeException("unexpected exception", e);
}
if (entry != null) {
- LogWriterSupport.getLogWriter().info("found entry " + entry);
+ LogWriterUtils.getLogWriter().info("found entry " + entry);
}
return (entry != null);
}
@@ -655,7 +655,7 @@ public class UpdateVersionDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost[" + locPort + "]");
- props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
props.setProperty(DistributionConfig.USE_CLUSTER_CONFIGURATION_NAME, "false");
InternalDistributedSystem ds = test.getSystem(props);
[04/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PutAllCSDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PutAllCSDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PutAllCSDUnitTest.java
index 37d272f..cc5876f 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PutAllCSDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/PutAllCSDUnitTest.java
@@ -77,14 +77,14 @@ import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -156,7 +156,7 @@ public void testOneServer() throws CacheException, InterruptedException {
VM client2 = host.getVM(3);
final String regionName = getUniqueName();
final int serverPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
// set <false, true> means <PR=false, notifyBySubscription=true> to enable registerInterest and CQ
createBridgeServer(server, regionName, serverPort, false, 0, null);
@@ -177,7 +177,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// registerInterest for ALL_KEYS
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -203,7 +203,7 @@ public void testOneServer() throws CacheException, InterruptedException {
CqAttributes cqa1 = cqf1.create();
String cqName1 = "EOInfoTracker";
String queryStr1 = "SELECT ALL * FROM /root/"+regionName+" ii WHERE ii.getTicker() >= '10' and ii.getTicker() < '20'";
- LogWriterSupport.getLogWriter().info("Query String: "+queryStr1);
+ LogWriterUtils.getLogWriter().info("Query String: "+queryStr1);
try {
QueryService cqService = getCache().getQueryService();
CqQuery EOTracker = cqService.newCq(cqName1, queryStr1, cqa1);
@@ -213,11 +213,11 @@ public void testOneServer() throws CacheException, InterruptedException {
for (int i=0; i<list1.size(); i++) {
Struct s = (Struct)list1.get(i);
TestObject o = (TestObject)s.get("value");
- LogWriterSupport.getLogWriter().info("InitialResult:"+i+":"+o);
+ LogWriterUtils.getLogWriter().info("InitialResult:"+i+":"+o);
localregion.put("key-"+i, o);
}
if (localregion.size() > 0) {
- LogWriterSupport.getLogWriter().info("CQ is ready");
+ LogWriterUtils.getLogWriter().info("CQ is ready");
synchronized(lockObject) {
lockObject.notify();
}
@@ -297,8 +297,8 @@ public void testOneServer() throws CacheException, InterruptedException {
if (obj != null) {
// wait for the key to be destroyed
Wait.pause(100);
- if (LogWriterSupport.getLogWriter().fineEnabled()) {
- LogWriterSupport.getLogWriter().info("Waiting 100ms("+cnt+") for key-" + i + " to be destroyed");
+ if (LogWriterUtils.getLogWriter().fineEnabled()) {
+ LogWriterUtils.getLogWriter().info("Waiting 100ms("+cnt+") for key-" + i + " to be destroyed");
}
cnt++;
} else {
@@ -329,7 +329,7 @@ public void testOneServer() throws CacheException, InterruptedException {
obj = (TestObject)localregion.get("key-"+i);
if (obj == null || obj.getPrice() != i*10) {
Wait.pause(100);
- LogWriterSupport.getLogWriter().info("Waiting 100ms("+cnt+") for obj.getPrice() == i*10 at entry "+i);
+ LogWriterUtils.getLogWriter().info("Waiting 100ms("+cnt+") for obj.getPrice() == i*10 at entry "+i);
cnt++;
} else {
break;
@@ -343,7 +343,7 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
// verify stats for client putAll into distributed region
// 1. verify client staus
@@ -429,7 +429,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=false to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -444,7 +444,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// registerInterest for ALL_KEYS
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -500,7 +500,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=false to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -583,7 +583,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
assertEquals(0, region.size());
MyWriter mywriter = (MyWriter)region.getAttributes().getCacheWriter();
- LogWriterSupport.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
+ LogWriterUtils.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
assertEquals(numberOfEntries, mywriter.num_destroyed);
}
});
@@ -595,7 +595,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
assertEquals(0, region.size());
MyWriter mywriter = (MyWriter)region.getAttributes().getCacheWriter();
- LogWriterSupport.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
+ LogWriterUtils.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
// beforeDestroys are only triggered at server1 since the removeAll is submitted from client1
assertEquals(0, mywriter.num_destroyed);
}
@@ -623,8 +623,8 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
+ ThreadUtils.join(async2, 30 * 1000);
}
client1.invoke(new CacheSerializableRunnable(title
@@ -722,8 +722,8 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
+ ThreadUtils.join(async2, 30 * 1000);
}
client1.invoke(new CacheSerializableRunnable(title+"client1 removeAll") {
@@ -918,7 +918,7 @@ public void testOneServer() throws CacheException, InterruptedException {
VM client2 = host.getVM(3);
final String regionName = getUniqueName();
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=false to test local-invalidates
int serverPort1 = createServerRegion(server1, regionName, CCE);
@@ -991,7 +991,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
assertEquals(0, region.size());
MyWriter mywriter = (MyWriter)region.getAttributes().getCacheWriter();
- LogWriterSupport.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
+ LogWriterUtils.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
assertEquals(numberOfEntries, mywriter.num_destroyed);
}
});
@@ -1003,7 +1003,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
assertEquals(0, region.size());
MyWriter mywriter = (MyWriter)region.getAttributes().getCacheWriter();
- LogWriterSupport.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
+ LogWriterUtils.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
// beforeDestroys are only triggered at server1 since the removeAll is submitted from client1
assertEquals(0, mywriter.num_destroyed);
}
@@ -1145,7 +1145,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, isPR, redundantCopies, null);
@@ -1294,7 +1294,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, true, 0, "ds1");
@@ -1313,7 +1313,7 @@ public void testOneServer() throws CacheException, InterruptedException {
region.getAttributesMutator().addCacheListener(new MyListener(false));
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -1399,7 +1399,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, true, 1, null);
@@ -1482,7 +1482,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
assertEquals(0, region.size());
MyWriter mywriter = (MyWriter)region.getAttributes().getCacheWriter();
- LogWriterSupport.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
+ LogWriterUtils.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
// beforeDestroys are only triggered at primary buckets. server1 and server2 each holds half of buckets
assertEquals(numberOfEntries/2, mywriter.num_destroyed);
}
@@ -1495,7 +1495,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
assertEquals(0, region.size());
MyWriter mywriter = (MyWriter)region.getAttributes().getCacheWriter();
- LogWriterSupport.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
+ LogWriterUtils.getLogWriter().info("server cachewriter triggered for destroy: "+mywriter.num_destroyed);
// beforeDestroys are only triggered at primary buckets. server1 and server2 each holds half of buckets
assertEquals(numberOfEntries/2, mywriter.num_destroyed);
}
@@ -1523,8 +1523,8 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
+ ThreadUtils.join(async2, 30 * 1000);
}
client1.invoke(new CacheSerializableRunnable(title
@@ -1601,8 +1601,8 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
+ ThreadUtils.join(async2, 30 * 1000);
}
client1.invoke(new CacheSerializableRunnable(title+"client1 removeAll") {
@@ -1773,7 +1773,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final String regionName = getUniqueName();
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -1828,7 +1828,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -1856,7 +1856,7 @@ public void testOneServer() throws CacheException, InterruptedException {
region.getAttributesMutator().addCacheListener(new MyListener(false));
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client1 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client1 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -1886,21 +1886,21 @@ public void testOneServer() throws CacheException, InterruptedException {
int c2Size = getRegionSize(client2, regionName);
int s1Size = getRegionSize(server1, regionName);
int s2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
if (c1Size != 15) {
- LogWriterSupport.getLogWriter().info("waiting for client1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client1 to get all updates");
return false;
}
if (c2Size != 15) {
- LogWriterSupport.getLogWriter().info("waiting for client2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client2 to get all updates");
return false;
}
if (s1Size != 15) {
- LogWriterSupport.getLogWriter().info("waiting for server1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server1 to get all updates");
return false;
}
if (s2Size != 15) {
- LogWriterSupport.getLogWriter().info("waiting for server2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server2 to get all updates");
return false;
}
return true;
@@ -1950,21 +1950,21 @@ public void testOneServer() throws CacheException, InterruptedException {
int c2Size = getRegionSize(client2, regionName);
int s1Size = getRegionSize(server1, regionName);
int s2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
if (c1Size != 15) { // client 1 did not register interest
- LogWriterSupport.getLogWriter().info("waiting for client1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client1 to get all updates");
return false;
}
if (c2Size != 15*2) {
- LogWriterSupport.getLogWriter().info("waiting for client2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client2 to get all updates");
return false;
}
if (s1Size != 15*2) {
- LogWriterSupport.getLogWriter().info("waiting for server1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server1 to get all updates");
return false;
}
if (s2Size != 15*2) {
- LogWriterSupport.getLogWriter().info("waiting for server2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server2 to get all updates");
return false;
}
return true;
@@ -2007,21 +2007,21 @@ public void testOneServer() throws CacheException, InterruptedException {
int c2Size = getRegionSize(client2, regionName);
int s1Size = getRegionSize(server1, regionName);
int s2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
if (c1Size != 15-5) { // client 1 did not register interest
- LogWriterSupport.getLogWriter().info("waiting for client1 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for client1 to get all destroys");
return false;
}
if (c2Size != (15*2)-5) {
- LogWriterSupport.getLogWriter().info("waiting for client2 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for client2 to get all destroys");
return false;
}
if (s1Size != (15*2)-5) {
- LogWriterSupport.getLogWriter().info("waiting for server1 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for server1 to get all destroys");
return false;
}
if (s2Size != (15*2)-5) {
- LogWriterSupport.getLogWriter().info("waiting for server2 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for server2 to get all destroys");
return false;
}
return true;
@@ -2066,21 +2066,21 @@ public void testOneServer() throws CacheException, InterruptedException {
int c2Size = getRegionSize(client2, regionName);
int s1Size = getRegionSize(server1, regionName);
int s2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
if (c1Size != 15-5) { // client 1 did not register interest
- LogWriterSupport.getLogWriter().info("waiting for client1 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for client1 to get all destroys");
return false;
}
if (c2Size != (15*2)-5-5) {
- LogWriterSupport.getLogWriter().info("waiting for client2 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for client2 to get all destroys");
return false;
}
if (s1Size != (15*2)-5-5) {
- LogWriterSupport.getLogWriter().info("waiting for server1 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for server1 to get all destroys");
return false;
}
if (s2Size != (15*2)-5-5) {
- LogWriterSupport.getLogWriter().info("waiting for server2 to get all destroys");
+ LogWriterUtils.getLogWriter().info("waiting for server2 to get all destroys");
return false;
}
return true;
@@ -2116,7 +2116,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, true, 0, "ds1");
@@ -2153,7 +2153,7 @@ public void testOneServer() throws CacheException, InterruptedException {
region.getAttributesMutator().addCacheListener(new MyListener(false));
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -2180,7 +2180,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// server2 will closeCache after created 10 keys
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
if (async1.exceptionOccurred()) {
Assert.fail("Aync1 get exceptions:", async1.getException());
}
@@ -2189,14 +2189,14 @@ public void testOneServer() throws CacheException, InterruptedException {
// client2Size maybe more than client1Size
int client2Size = getRegionSize(client2, regionName);
int server1Size = getRegionSize(server1, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+client1Size+","+client2Size+","+server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+client1Size+","+client2Size+","+server1Size);
// assertEquals(server1Size, client1Size);
// restart server2
createBridgeServer(server2, regionName, serverPort2, true, 0, "ds1");
server1Size = getRegionSize(server1, regionName);
int server2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes after server2 restarted: "+client1Size+","+client2Size+","+server1Size+":"+server2Size);
+ LogWriterUtils.getLogWriter().info("region sizes after server2 restarted: "+client1Size+","+client2Size+","+server1Size+":"+server2Size);
assertEquals(client2Size, server1Size);
assertEquals(client2Size, server2Size);
@@ -2222,7 +2222,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int new_client1Size = getRegionSize(client1, regionName);
int new_client2Size = getRegionSize(client2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes after re-run the putAll: "+new_client1Size+","+new_client2Size+","+new_server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes after re-run the putAll: "+new_client1Size+","+new_client2Size+","+new_server1Size);
assertEquals(server1Size+numberOfEntries/2, new_server1Size);
assertEquals(client1Size+numberOfEntries/2, new_client1Size);
assertEquals(client2Size+numberOfEntries/2, new_client2Size);
@@ -2231,7 +2231,7 @@ public void testOneServer() throws CacheException, InterruptedException {
createBridgeServer(server2, regionName, serverPort2, true, 0, "ds1");
server1Size = getRegionSize(server1, regionName);
server2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes after restart server2: "+server1Size+","+server2Size);
+ LogWriterUtils.getLogWriter().info("region sizes after restart server2: "+server1Size+","+server2Size);
assertEquals(server1Size, server2Size);
// add a cacheWriter for server to stop after created 15 keys
@@ -2259,7 +2259,7 @@ public void testOneServer() throws CacheException, InterruptedException {
new_server1Size = getRegionSize(server1, regionName);
int new_server2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes after restart server2: "+new_server1Size+","+new_server2Size);
+ LogWriterUtils.getLogWriter().info("region sizes after restart server2: "+new_server1Size+","+new_server2Size);
assertEquals(server1Size+15, new_server1Size);
assertEquals(server2Size+15, new_server2Size);
server1.invoke(removeExceptionTag1(expectedExceptions));
@@ -2296,7 +2296,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, true, 0, "ds1");
@@ -2315,7 +2315,7 @@ public void testOneServer() throws CacheException, InterruptedException {
region.getAttributesMutator().addCacheListener(new MyListener(false));
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -2334,21 +2334,21 @@ public void testOneServer() throws CacheException, InterruptedException {
int c2Size = getRegionSize(client2, regionName);
int s1Size = getRegionSize(server1, regionName);
int s2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
if (c1Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for client1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client1 to get all updates");
return false;
}
if (c2Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for client2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client2 to get all updates");
return false;
}
if (s1Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for server1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server1 to get all updates");
return false;
}
if (s2Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for server2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server2 to get all updates");
return false;
}
return true;
@@ -2397,7 +2397,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// server2 will closeCache after creating 10 keys
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
if (async1.exceptionOccurred()) {
Assert.fail("Aync1 get exceptions:", async1.getException());
}
@@ -2406,11 +2406,11 @@ public void testOneServer() throws CacheException, InterruptedException {
// client2Size maybe more than client1Size
client2Size = getRegionSize(client2, regionName);
server1Size = getRegionSize(server1, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+client1Size+","+client2Size+","+server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+client1Size+","+client2Size+","+server1Size);
// assertEquals(server1Size, client1Size);
// restart server2
- LogWriterSupport.getLogWriter().info("restarting server 2");
+ LogWriterUtils.getLogWriter().info("restarting server 2");
createBridgeServer(server2, regionName, serverPort2, true, 0, "ds1");
// Test Case1: Trigger singleHop putAll. Stop server2 in middle.
@@ -2421,7 +2421,7 @@ public void testOneServer() throws CacheException, InterruptedException {
client2Size = getRegionSize(client2, regionName);
server1Size = getRegionSize(server1, regionName);
server2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes after server2 restarted: "+client1Size+","+client2Size+","+server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes after server2 restarted: "+client1Size+","+client2Size+","+server1Size);
assertEquals(150, client1Size);
assertEquals(client2Size, server1Size);
assertEquals(client2Size, server2Size);
@@ -2449,7 +2449,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// Test Case 2: based on case 1, but this time, there should be no X keys
// created on server2.
- LogWriterSupport.getLogWriter().info("region sizes after re-run the putAll: "+new_client1Size+","+new_client2Size+","+new_server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes after re-run the putAll: "+new_client1Size+","+new_client2Size+","+new_server1Size);
assertEquals(server1Size+numberOfEntries/2, new_server1Size);
assertEquals(client1Size+numberOfEntries/2, new_client1Size);
assertEquals(client2Size+numberOfEntries/2, new_client2Size);
@@ -2458,7 +2458,7 @@ public void testOneServer() throws CacheException, InterruptedException {
createBridgeServer(server2, regionName, serverPort2, true, 0, "ds1");
server1Size = getRegionSize(server1, regionName);
server2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes after restart server2: "+server1Size+","+server2Size);
+ LogWriterUtils.getLogWriter().info("region sizes after restart server2: "+server1Size+","+server2Size);
assertEquals(server1Size, server2Size);
// add a cacheWriter for server to fail putAll after it created cacheWriterAllowedKeyNum keys
@@ -2490,7 +2490,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int oncemore_client2Size = getRegionSize(client2, regionName);
int oncemore_server1Size = getRegionSize(server1, regionName);
int oncemore_server2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes in once more test: "
+ LogWriterUtils.getLogWriter().info("region sizes in once more test: "
+oncemore_client1Size+","+oncemore_client2Size+","+oncemore_server1Size+","+oncemore_server2Size);
int delta_at_server = oncemore_server1Size - server1Size;
assertEquals(new_client1Size+delta_at_server, oncemore_client1Size);
@@ -2528,7 +2528,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set <true, false> means <PR=true, notifyBySubscription=false> to test local-invalidates
createBridgeServer(server1, regionName, serverPort1, true, 1, "ds1");
@@ -2547,7 +2547,7 @@ public void testOneServer() throws CacheException, InterruptedException {
region.getAttributesMutator().addCacheListener(new MyListener(false));
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -2566,21 +2566,21 @@ public void testOneServer() throws CacheException, InterruptedException {
int c2Size = getRegionSize(client2, regionName);
int s1Size = getRegionSize(server1, regionName);
int s2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+c1Size+","+c2Size+","+s1Size+","+s2Size);
if (c1Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for client1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client1 to get all updates");
return false;
}
if (c2Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for client2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for client2 to get all updates");
return false;
}
if (s1Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for server1 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server1 to get all updates");
return false;
}
if (s2Size != numberOfEntries) {
- LogWriterSupport.getLogWriter().info("waiting for server2 to get all updates");
+ LogWriterUtils.getLogWriter().info("waiting for server2 to get all updates");
return false;
}
return true;
@@ -2623,7 +2623,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// server2 will closeCache after created 10 keys
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
if (async1.exceptionOccurred()) {
Assert.fail("Aync1 get exceptions:", async1.getException());
}
@@ -2633,7 +2633,7 @@ public void testOneServer() throws CacheException, InterruptedException {
client2Size = getRegionSize(client2, regionName);
server1Size = getRegionSize(server1, regionName);
// putAll should succeed after retry
- LogWriterSupport.getLogWriter().info("region sizes: "+client1Size+","+client2Size+","+server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+client1Size+","+client2Size+","+server1Size);
assertEquals(server1Size, client1Size);
assertEquals(server1Size, client2Size);
@@ -2642,7 +2642,7 @@ public void testOneServer() throws CacheException, InterruptedException {
server1Size = getRegionSize(server1, regionName);
server2Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes after server2 restarted: "+client1Size+","+client2Size+","+server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes after server2 restarted: "+client1Size+","+client2Size+","+server1Size);
assertEquals(client2Size, server1Size);
assertEquals(client2Size, server2Size);
@@ -2660,7 +2660,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int new_client2Size = getRegionSize(client2, regionName);
// putAll should succeed, all the numbers should match
- LogWriterSupport.getLogWriter().info("region sizes after re-run the putAll: "+new_client1Size+","+new_client2Size+","+new_server1Size);
+ LogWriterUtils.getLogWriter().info("region sizes after re-run the putAll: "+new_client1Size+","+new_client2Size+","+new_server1Size);
assertEquals(new_server1Size, new_client1Size);
assertEquals(new_server1Size, new_client2Size);
@@ -2685,7 +2685,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final String regionName = getUniqueName();
final int[] serverPorts = AvailablePortHelper.getRandomAvailableTCPPorts(3);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
final SharedCounter sc_server1 = new SharedCounter("server1");
final SharedCounter sc_server2 = new SharedCounter("server2");
@@ -2766,7 +2766,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int server1Size = getRegionSize(server1, regionName);
int server2Size = getRegionSize(server2, regionName);
int server3Size = getRegionSize(server2, regionName);
- LogWriterSupport.getLogWriter().info("region sizes: "+client1Size+","+server1Size+","+server2Size+","+server3Size);
+ LogWriterUtils.getLogWriter().info("region sizes: "+client1Size+","+server1Size+","+server2Size+","+server3Size);
AsyncInvocation async1 = client1.invokeAsync(new CacheSerializableRunnable(title+"client1 add listener and putAll") {
public void run2() throws CacheException {
@@ -2778,7 +2778,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// server1 and server2 will closeCache after created 10 keys
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
if (async1.exceptionOccurred()) {
Assert.fail("Aync1 get exceptions:", async1.getException());
}
@@ -2788,14 +2788,14 @@ public void testOneServer() throws CacheException, InterruptedException {
public void run2() throws CacheException {
Region r = getRootRegion().getSubregion(regionName);
MyListener l = (MyListener)r.getAttributes().getCacheListeners()[0];
- LogWriterSupport.getLogWriter().info("event counters : "+l.sc);
+ LogWriterUtils.getLogWriter().info("event counters : "+l.sc);
assertEquals(numberOfEntries, l.sc.num_create_event);
assertEquals(0, l.sc.num_update_event);
}
});
- LogWriterSupport.getLogWriter().info("event counters : "+myListener.sc);
+ LogWriterUtils.getLogWriter().info("event counters : "+myListener.sc);
assertEquals(numberOfEntries, myListener.sc.num_create_event);
assertEquals(0, myListener.sc.num_update_event);
@@ -2831,7 +2831,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -2860,7 +2860,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// registerInterest for ALL_KEYS
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client1 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client1 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -2871,7 +2871,7 @@ public void testOneServer() throws CacheException, InterruptedException {
// registerInterest for ALL_KEYS
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -2892,7 +2892,7 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
// verify bridge server 2 for asyn keys
server2.invoke(new CacheSerializableRunnable(title
@@ -2931,7 +2931,7 @@ public void testOneServer() throws CacheException, InterruptedException {
int serverPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int serverPort1 = serverPorts[0];
final int serverPort2 = serverPorts[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -2962,7 +2962,7 @@ public void testOneServer() throws CacheException, InterruptedException {
try {
doPutAll(regionName, "key-", thousandEntries);
} catch (Exception e) {
- LogWriterSupport.getLogWriter().info(title + "Expected SocketTimeOut:"+e.getMessage());
+ LogWriterUtils.getLogWriter().info(title + "Expected SocketTimeOut:"+e.getMessage());
exceptionTriggered = true;
}
assertTrue(exceptionTriggered);
@@ -2993,7 +2993,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -3015,7 +3015,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
region.getAttributesMutator().addCacheListener(new MyListener(false));
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -3025,7 +3025,7 @@ public void testOneServer() throws CacheException, InterruptedException {
try {
doPutAll(regionName, title, testEndPointSwitchNumber);
} catch (Exception e) {
- LogWriterSupport.getLogWriter().info(title + "Expected SocketTimeOut"+e.getMessage());
+ LogWriterUtils.getLogWriter().info(title + "Expected SocketTimeOut"+e.getMessage());
}
}
});
@@ -3113,7 +3113,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -3131,7 +3131,7 @@ public void testOneServer() throws CacheException, InterruptedException {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client1 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client1 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -3140,7 +3140,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
region.getAttributesMutator().addCacheListener(new MyListener(false));
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -3168,8 +3168,8 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
+ ThreadUtils.join(async2, 30 * 1000);
// verify client 2 for asyn keys
client2.invokeAsync(new CacheSerializableRunnable(title
@@ -3241,12 +3241,12 @@ public void testOneServer() throws CacheException, InterruptedException {
}
region.putAll(map, "putAllCallback");
try {
- LogWriterSupport.getLogWriter().info("before commit TX1");
+ LogWriterUtils.getLogWriter().info("before commit TX1");
//tx.commit();
- LogWriterSupport.getLogWriter().info("TX1 committed");
+ LogWriterUtils.getLogWriter().info("TX1 committed");
}
catch (CommitConflictException e) {
- LogWriterSupport.getLogWriter().info("TX1 rollbacked");
+ LogWriterUtils.getLogWriter().info("TX1 rollbacked");
}
}
});
@@ -3268,12 +3268,12 @@ public void testOneServer() throws CacheException, InterruptedException {
}
region.putAll(map, "putAllCallback");
try {
- LogWriterSupport.getLogWriter().info("before commit TX2");
+ LogWriterUtils.getLogWriter().info("before commit TX2");
//tx.commit();
- LogWriterSupport.getLogWriter().info("TX2 committed");
+ LogWriterUtils.getLogWriter().info("TX2 committed");
}
catch (CommitConflictException e) {
- LogWriterSupport.getLogWriter().info("TX2 rollbacked");
+ LogWriterUtils.getLogWriter().info("TX2 rollbacked");
}
}
});
@@ -3296,19 +3296,19 @@ public void testOneServer() throws CacheException, InterruptedException {
}
region.putAll(map, "putAllCallback");
try {
- LogWriterSupport.getLogWriter().info("before commit TX3");
+ LogWriterUtils.getLogWriter().info("before commit TX3");
//tx.commit();
- LogWriterSupport.getLogWriter().info("TX3 committed");
+ LogWriterUtils.getLogWriter().info("TX3 committed");
}
catch (CommitConflictException e) {
- LogWriterSupport.getLogWriter().info("TX3 rollbacked");
+ LogWriterUtils.getLogWriter().info("TX3 rollbacked");
}
}
});
- Threads.join(async1, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async2, 30 * 1000, LogWriterSupport.getLogWriter());
- Threads.join(async3, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, 30 * 1000);
+ ThreadUtils.join(async2, 30 * 1000);
+ ThreadUtils.join(async3, 30 * 1000);
// verify server 2 for asyn keys
server2.invoke(new CacheSerializableRunnable(title
@@ -3331,7 +3331,7 @@ public void testOneServer() throws CacheException, InterruptedException {
else if (obj.getPrice() == i + numberOfEntries * 2) {
tx_no = 3;
}
- LogWriterSupport.getLogWriter().info("Verifying TX:" + tx_no);
+ LogWriterUtils.getLogWriter().info("Verifying TX:" + tx_no);
}
if (tx_no == 1) {
assertEquals(i, obj.getPrice());
@@ -3370,7 +3370,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, true, 0, null);
@@ -3397,7 +3397,7 @@ public void testOneServer() throws CacheException, InterruptedException {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -3412,7 +3412,7 @@ public void testOneServer() throws CacheException, InterruptedException {
for (Object key : entries.keySet()) {
RegionEntry internalRegionEntry = entries.getEntry(key);
VersionTag tag = internalRegionEntry.getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
+ LogWriterUtils.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
versions.add(tag);
}
@@ -3432,7 +3432,7 @@ public void testOneServer() throws CacheException, InterruptedException {
for (Object key : entries.keySet()) {
RegionEntry internalRegionEntry = entries.getEntry(key);
VersionTag tag = internalRegionEntry.getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
+ LogWriterUtils.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
versions.add(tag);
}
return versions;
@@ -3440,9 +3440,9 @@ public void testOneServer() throws CacheException, InterruptedException {
});
assertEquals(numberOfEntries*2, client1Versions.size());
- LogWriterSupport.getLogWriter().info(Arrays.toString(client1Versions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(client1Versions.toArray()));
- LogWriterSupport.getLogWriter().info(Arrays.toString(client2Versions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(client2Versions.toArray()));
for (VersionTag tag : client1Versions) {
if (!client2Versions.contains(tag)) {
@@ -3470,7 +3470,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, true, 0, null);
@@ -3500,7 +3500,7 @@ public void testOneServer() throws CacheException, InterruptedException {
Region region = getRootRegion().getSubregion(regionName);
region.registerInterest("ALL_KEYS");
assertEquals(numberOfEntries, region.size());
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -3516,7 +3516,7 @@ public void testOneServer() throws CacheException, InterruptedException {
for (Object key : entries.keySet()) {
RegionEntry internalRegionEntry = entries.getEntry(key);
VersionTag tag = internalRegionEntry.getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
+ LogWriterUtils.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
versions.add(tag);
}
@@ -3536,7 +3536,7 @@ public void testOneServer() throws CacheException, InterruptedException {
for (Object key : entries.keySet()) {
RegionEntry internalRegionEntry = entries.getEntry(key);
VersionTag tag = internalRegionEntry.getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
+ LogWriterUtils.getLogWriter().info("Entry version tag on client for " + key + ": " + tag);
versions.add(tag);
}
return versions;
@@ -3544,9 +3544,9 @@ public void testOneServer() throws CacheException, InterruptedException {
});
assertEquals(numberOfEntries*2, client1RAVersions.size());
- LogWriterSupport.getLogWriter().info(Arrays.toString(client1RAVersions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(client1RAVersions.toArray()));
- LogWriterSupport.getLogWriter().info(Arrays.toString(client2RAVersions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(client2RAVersions.toArray()));
for (VersionTag tag : client1RAVersions) {
if (!client2RAVersions.contains(tag)) {
@@ -3573,7 +3573,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, true, 1, null);
@@ -3592,7 +3592,7 @@ public void testOneServer() throws CacheException, InterruptedException {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -3619,7 +3619,7 @@ public void testOneServer() throws CacheException, InterruptedException {
for (Object key : entries.keySet()) {
RegionEntry internalRegionEntry = entries.getEntry(key);
VersionTag tag = internalRegionEntry.getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("Entry version tag on server1:" + tag);
+ LogWriterUtils.getLogWriter().info("Entry version tag on server1:" + tag);
versions.add(key + " " + tag);
}
}
@@ -3650,10 +3650,10 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- LogWriterSupport.getLogWriter().info(Arrays.toString(expectedVersions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(expectedVersions.toArray()));
assertEquals(numberOfEntries*2, actualVersions.size());
- LogWriterSupport.getLogWriter().info(Arrays.toString(actualVersions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(actualVersions.toArray()));
for (String keyTag : expectedVersions) {
if (!actualVersions.contains(keyTag)) {
@@ -3681,7 +3681,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort3 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, true, 1, null);
@@ -3700,7 +3700,7 @@ public void testOneServer() throws CacheException, InterruptedException {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
+ LogWriterUtils.getLogWriter().info("client2 registerInterest ALL_KEYS at "+region.getFullPath());
}
});
@@ -3729,7 +3729,7 @@ public void testOneServer() throws CacheException, InterruptedException {
for (Object key : entries.keySet()) {
RegionEntry internalRegionEntry = entries.getEntry(key);
VersionTag tag = internalRegionEntry.getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("Entry version tag on server1:" + tag);
+ LogWriterUtils.getLogWriter().info("Entry version tag on server1:" + tag);
versions.add(key + " " + tag);
}
}
@@ -3761,10 +3761,10 @@ public void testOneServer() throws CacheException, InterruptedException {
}
});
- LogWriterSupport.getLogWriter().info(Arrays.toString(expectedRAVersions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(expectedRAVersions.toArray()));
assertEquals(numberOfEntries*2, actualRAVersions.size());
- LogWriterSupport.getLogWriter().info(Arrays.toString(actualRAVersions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(actualRAVersions.toArray()));
for (String keyTag : expectedRAVersions) {
if (!actualRAVersions.contains(keyTag)) {
@@ -3791,7 +3791,7 @@ public void testOneServer() throws CacheException, InterruptedException {
final int serverPort1 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
final int serverPort2 = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
// set notifyBySubscription=true to test register interest
createBridgeServer(server1, regionName, serverPort1, false, 0, null);
@@ -3825,7 +3825,7 @@ public void testOneServer() throws CacheException, InterruptedException {
for (Object key : entries.keySet()) {
RegionEntry internalRegionEntry = entries.getEntry(key);
VersionTag tag = internalRegionEntry.getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("Entry version tag on client:" + tag);
+ LogWriterUtils.getLogWriter().info("Entry version tag on client:" + tag);
versions.add(tag);
}
@@ -3853,9 +3853,9 @@ public void testOneServer() throws CacheException, InterruptedException {
});
assertEquals(numberOfEntries*2, client1Versions.size());
- LogWriterSupport.getLogWriter().info(Arrays.toString(client1Versions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(client1Versions.toArray()));
- LogWriterSupport.getLogWriter().info(Arrays.toString(client2Versions.toArray()));
+ LogWriterUtils.getLogWriter().info(Arrays.toString(client2Versions.toArray()));
for (VersionTag tag : client2Versions) {
tag.setMemberID(null);
@@ -3872,7 +3872,7 @@ public void testOneServer() throws CacheException, InterruptedException {
public void run2() throws CacheException {
// Create DS
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
getSystem(config);
// Create Region
@@ -3926,7 +3926,7 @@ public void testOneServer() throws CacheException, InterruptedException {
}
try {
int retPort = startBridgeServer(serverPort);
- LogWriterSupport.getLogWriter().info("Cache Server Started:"+retPort+":"+serverPort);
+ LogWriterUtils.getLogWriter().info("Cache Server Started:"+retPort+":"+serverPort);
} catch (Exception e) {
Assert.fail("While starting CacheServer", e);
}
@@ -4182,7 +4182,7 @@ public void testOneServer() throws CacheException, InterruptedException {
localregion.put(key, newValue);
num_updates ++;
}
- LogWriterSupport.getLogWriter().info("CQListener:TestObject:" + key + ":" + newValue);
+ LogWriterUtils.getLogWriter().info("CQListener:TestObject:" + key + ":" + newValue);
}
public void close() {
@@ -4248,7 +4248,7 @@ public void testOneServer() throws CacheException, InterruptedException {
if (closeCacheAtItem != -1 && sc.num_create_event >= closeCacheAtItem) {
closeCacheAsync(vm);
}
- LogWriterSupport.getLogWriter().fine(
+ LogWriterUtils.getLogWriter().fine(
"MyListener:afterCreate " + event.getKey() + ":"
+ event.getNewValue()+":num_create_event="+sc.num_create_event
+ ":eventID="+((EntryEventImpl)event).getEventId());
@@ -4269,7 +4269,7 @@ public void testOneServer() throws CacheException, InterruptedException {
if (event.getKey().toString().startsWith("testEndPointSwitch")) {
num_testEndPointSwitch++;
if (num_testEndPointSwitch == testEndPointSwitchNumber) {
- LogWriterSupport.getLogWriter().info("testEndPointSwitch received expected events");
+ LogWriterUtils.getLogWriter().info("testEndPointSwitch received expected events");
synchronized(lockObject3) {
lockObject3.notify();
}
@@ -4278,7 +4278,7 @@ public void testOneServer() throws CacheException, InterruptedException {
if (event.getKey().toString().startsWith("testHADRFailOver")) {
num_testHADRFailOver++;
if (num_testHADRFailOver == thousandEntries*2) {
- LogWriterSupport.getLogWriter().info("testHADRFailOver received expected events");
+ LogWriterUtils.getLogWriter().info("testHADRFailOver received expected events");
synchronized(lockObject4) {
lockObject4.notify();
}
@@ -4288,7 +4288,7 @@ public void testOneServer() throws CacheException, InterruptedException {
public void afterUpdate(EntryEvent event) {
sc.num_update_event++;
- LogWriterSupport.getLogWriter().fine(
+ LogWriterUtils.getLogWriter().fine(
"MyListener:afterUpdate " + event.getKey() + ":"
+ event.getNewValue()+":"+event.getOldValue()
+":num_update_event="+sc.num_update_event
@@ -4311,7 +4311,7 @@ public void testOneServer() throws CacheException, InterruptedException {
if (event.getOldValue() !=null) {
num_oldValueInAfterUpdate++;
if (num_oldValueInAfterUpdate == numberOfEntries) {
- LogWriterSupport.getLogWriter().info("received expected OldValue events");
+ LogWriterUtils.getLogWriter().info("received expected OldValue events");
synchronized(lockObject) {
lockObject.notify();
}
@@ -4322,7 +4322,7 @@ public void testOneServer() throws CacheException, InterruptedException {
public void afterInvalidate(EntryEvent event) {
sc.num_invalidate_event++;
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("local invalidate is triggered for " + event.getKey()+":num_invalidte_event="+sc.num_invalidate_event);
}
@@ -4331,7 +4331,7 @@ public void testOneServer() throws CacheException, InterruptedException {
if (event.getOperation().isRemoveAll()) {
assertEquals("removeAllCallback", event.getCallbackArgument());
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("local destroy is triggered for " + event.getKey()+":num_invalidte_event="+sc.num_destroy_event);
}
}
@@ -4360,7 +4360,7 @@ public void testOneServer() throws CacheException, InterruptedException {
if (exceptionAtItem != -1 && num_created >= exceptionAtItem) {
throw new CacheWriterException("Triggered exception as planned, created "+num_created+" keys.");
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"MyWriter:beforeCreate " + event.getKey() + ":"
+ event.getNewValue() + "num_created=" + num_created);
@@ -4381,7 +4381,7 @@ public void testOneServer() throws CacheException, InterruptedException {
}
public void beforeUpdate(EntryEvent event) {
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"MyWriter:beforeUpdate " + event.getKey() + ":"
+ event.getNewValue());
@@ -4405,7 +4405,7 @@ public void testOneServer() throws CacheException, InterruptedException {
if (exceptionAtItem != -1 && num_destroyed >= exceptionAtItem) {
throw new CacheWriterException("Triggered exception as planned, destroyed "+num_destroyed+" keys.");
}
- LogWriterSupport.getLogWriter().info("MyWriter:beforeDestroy " + event.getKey() + ":" + "num_destroyed=" + num_destroyed);
+ LogWriterUtils.getLogWriter().info("MyWriter:beforeDestroy " + event.getKey() + ":" + "num_destroyed=" + num_destroyed);
if (event.getOperation().isRemoveAll()) {
assertEquals("removeAllCallback", event.getCallbackArgument());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/RemoteCQTransactionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/RemoteCQTransactionDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/RemoteCQTransactionDUnitTest.java
index 1c9e6d8..4bca2c4 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/RemoteCQTransactionDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/RemoteCQTransactionDUnitTest.java
@@ -71,7 +71,7 @@ import com.gemstone.gemfire.internal.cache.execute.data.Order;
import com.gemstone.gemfire.internal.cache.execute.data.OrderId;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -740,7 +740,7 @@ public class RemoteCQTransactionDUnitTest extends CacheTestCase {
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port);
ccf.setPoolSubscriptionEnabled(true);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<Integer, String> crf = cCache
.createClientRegionFactory(isEmpty ? ClientRegionShortcut.PROXY
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/CQListGIIDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/CQListGIIDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/CQListGIIDUnitTest.java
index 9ce8682..a252e4c 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/CQListGIIDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/CQListGIIDUnitTest.java
@@ -60,7 +60,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -333,7 +333,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
public static void createClientCache(Integer port1, Integer port2,
Integer port3, String rLevel, Boolean addListener) throws Exception {
CacheServerTestUtil.disableShufflingOfEndpoints();
- String host = NetworkSupport.getIPLiteral();
+ String host = NetworkUtils.getIPLiteral();
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
@@ -378,7 +378,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
/* Register CQs */
public static void createCQ(String cqName, String queryStr) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -389,7 +389,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = { new CqQueryTestListener(com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()) };
+ CqListener[] cqListeners = { new CqQueryTestListener(com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()) };
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -401,7 +401,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
}
catch (Exception ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to create CQ " + cqName
+ " . ");
@@ -411,7 +411,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
}
public static void executeCQ(String cqName, Boolean initialResults) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### DEBUG EXECUTE CQ START ####");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### DEBUG EXECUTE CQ START ####");
// Get CQ Service.
QueryService cqService = null;
CqQuery cq1 = null;
@@ -421,19 +421,19 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
try {
cq1 = cqService.getCq(cqName);
if (cq1 == null) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Failed to get CqQuery object for CQ name: " + cqName);
Assert.fail("Failed to get CQ " + cqName, new Exception("Failed to get CQ "
+ cqName));
}
else {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
}
}
catch (Exception ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().error(ex);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().error(ex);
AssertionError err = new AssertionError("Failed to execute CQ " + cqName);
err.initCause(ex);
throw err;
@@ -446,14 +446,14 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
cqResults = cq1.executeWithInitialResults();
}
catch (Exception ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
err.initCause(ex);
throw err;
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("initial result size = " + cqResults.size());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("initial result size = " + cqResults.size());
assertTrue("executeWithInitialResults() state mismatch", cq1.getState()
.isRunning());
// if (expectedResultsSize >= 0) {
@@ -467,7 +467,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
cq1.execute();
}
catch (Exception ex) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ "
+ cqName);
@@ -484,7 +484,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
try {
region = cache.getRegion("root").getSubregion(regionName);
region.getAttributesMutator().setCacheListener(
- new CertifiableTestCacheListener(com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter()));
+ new CertifiableTestCacheListener(com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter()));
}
catch (Exception cqe) {
AssertionError err = new AssertionError("Failed to get Region.");
@@ -594,7 +594,7 @@ public class CQListGIIDUnitTest extends DistributedTestCase {
for (int i = 0; i < num.longValue(); i++) {
r.put(KEY + i, new Portfolio(i + 1));
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"### Number of Entries in Region " + rName + ": " + r.keys().size());
}
catch (Exception ex) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADispatcherDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADispatcherDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADispatcherDUnitTest.java
index 4934966..a8f23dd 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADispatcherDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/ha/HADispatcherDUnitTest.java
@@ -56,8 +56,8 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.ConflationDUnitTest;
import com.gemstone.gemfire.internal.cache.tier.sockets.HAEventWrapper;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -147,12 +147,12 @@ public class HADispatcherDUnitTest extends DistributedTestCase
client2.invoke( CacheServerTestUtil.class, "disableShufflingOfEndpoints");
client1.invoke(HADispatcherDUnitTest.class, "createClientCache",
new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)),
+ NetworkUtils.getServerHostName(Host.getHost(0)),
new Integer(PORT1), new Integer(PORT2),
new Boolean(false) });
client2.invoke(HADispatcherDUnitTest.class, "createClientCache",
new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)),
+ NetworkUtils.getServerHostName(Host.getHost(0)),
new Integer(PORT1), new Integer(PORT2),
new Boolean(true) });
//createClientCache(new Integer(PORT1), new Integer(PORT2), new Boolean(true) );
@@ -450,7 +450,7 @@ public class HADispatcherDUnitTest extends DistributedTestCase
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
cqf.initCqListeners(cqListeners);
CqAttributes cqa = cqf.create();
@@ -462,7 +462,7 @@ public class HADispatcherDUnitTest extends DistributedTestCase
CqQuery cq1 = cqService.newCq(cqName, queryStr, cqa);
cq1.execute();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CQService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CQService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to create/execute CQ " + cqName + " . ");
err.initCause(ex);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientToServerDeltaDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientToServerDeltaDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientToServerDeltaDUnitTest.java
index ac4df17..1ad1e8f 100755
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientToServerDeltaDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ClientToServerDeltaDUnitTest.java
@@ -52,7 +52,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegionLocalMaxMemoryDUnitTest.TestObject1;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -164,11 +164,11 @@ public class ClientToServerDeltaDUnitTest extends DistributedTestCase {
Boolean.TRUE, Boolean.FALSE, clone, enableDelta })).intValue();
client.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()), new Integer(PORT1),
Boolean.FALSE, Boolean.FALSE, Boolean.FALSE });
client2.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server2.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server2.getHost()),
new Integer(PORT2), Boolean.TRUE, Boolean.FALSE, cq, queries, RI });
}
@@ -183,11 +183,11 @@ public class ClientToServerDeltaDUnitTest extends DistributedTestCase {
Boolean.FALSE, Boolean.FALSE, clone, enableDelta })).intValue();
client.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()), new Integer(PORT1),
Boolean.FALSE, Boolean.FALSE, Boolean.FALSE });
client2.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server2.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server2.getHost()),
new Integer(PORT2), Boolean.TRUE, Boolean.FALSE, cq, queries, RI });
}
@@ -716,7 +716,7 @@ public class ClientToServerDeltaDUnitTest extends DistributedTestCase {
new Object[] { Boolean.FALSE, Boolean.FALSE });
client.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()), new Integer(PORT1),
Boolean.FALSE, Boolean.TRUE, Boolean.FALSE });
/* server2.invoke(ClientToServerDeltaDUnitTest.class, "setFirstSecondUpdate",
@@ -749,7 +749,7 @@ public class ClientToServerDeltaDUnitTest extends DistributedTestCase {
new Object[] { Boolean.FALSE, Boolean.FALSE });
client.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()), new Integer(PORT1),
Boolean.FALSE, Boolean.TRUE, Boolean.FALSE });
client.invoke(ClientToServerDeltaDUnitTest.class, "putDelta",
@@ -781,11 +781,11 @@ public class ClientToServerDeltaDUnitTest extends DistributedTestCase {
new Object[] { Boolean.FALSE, Boolean.FALSE });
client.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()), new Integer(PORT1),
Boolean.TRUE, Boolean.TRUE, Boolean.FALSE });
client2.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()), new Integer(PORT1),
Boolean.TRUE, Boolean.FALSE, Boolean.FALSE });
int deltaSent = (Integer)server2.invoke(
@@ -812,7 +812,7 @@ public class ClientToServerDeltaDUnitTest extends DistributedTestCase {
new Object[] { Boolean.FALSE, Boolean.FALSE });
client.invoke(ClientToServerDeltaDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server.getHost()), new Integer(PORT1),
+ new Object[] { NetworkUtils.getServerHostName(server.getHost()), new Integer(PORT1),
Boolean.FALSE, Boolean.FALSE, Boolean.FALSE});
client.invoke(ClientToServerDeltaDUnitTest.class, "putDelta",
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaPropagationWithCQDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaPropagationWithCQDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaPropagationWithCQDUnitTest.java
index c467604..d30b510 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaPropagationWithCQDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DeltaPropagationWithCQDUnitTest.java
@@ -54,7 +54,7 @@ import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -129,10 +129,10 @@ public class DeltaPropagationWithCQDUnitTest extends DistributedTestCase {
// 2. setup a client
client1
.invoke(DeltaPropagationWithCQDUnitTest.class, "createClientCache",
- new Object[] {NetworkSupport.getServerHostName(server1.getHost()), port,
+ new Object[] {NetworkUtils.getServerHostName(server1.getHost()), port,
Boolean.TRUE});
// 3. setup another client with cqs and interest in all keys.
- createClientCache(NetworkSupport.getServerHostName(server1.getHost()), port, true);
+ createClientCache(NetworkUtils.getServerHostName(server1.getHost()), port, true);
registerCQs(1, "CQWithInterestDUnitTest_cq");
// 4. put a key on client1
client1.invoke(DeltaPropagationWithCQDUnitTest.class, "doPut", new Object[] {
@@ -167,10 +167,10 @@ public class DeltaPropagationWithCQDUnitTest extends DistributedTestCase {
// 2. setup a client with register interest
client1
.invoke(DeltaPropagationWithCQDUnitTest.class, "createClientCache",
- new Object[] {NetworkSupport.getServerHostName(server1.getHost()), port,
+ new Object[] {NetworkUtils.getServerHostName(server1.getHost()), port,
Boolean.TRUE});
// 3. setup another client with cqs but without interest.
- createClientCache(NetworkSupport.getServerHostName(server1.getHost()), port, false/*RI*/);
+ createClientCache(NetworkUtils.getServerHostName(server1.getHost()), port, false/*RI*/);
for (int i = 0; i < numOfCQs; i++) {
registerCQs(numOfListeners, "Query_"+i);
}
[06/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryUsingPoolDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryUsingPoolDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryUsingPoolDUnitTest.java
index 12ff8a1..2f317f2 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryUsingPoolDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqQueryUsingPoolDUnitTest.java
@@ -70,8 +70,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -208,7 +208,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setMirrorType(mirrorType);
@@ -251,7 +251,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
//AttributesFactory factory = new AttributesFactory();
//factory.setScope(Scope.DISTRIBUTED_ACK);
//factory.setMirrorType(MirrorType.KEYS_VALUES);
@@ -270,7 +270,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
//assertTrue(getSystem().getDistributionManager().getOtherDistributionManagerIds().size() > 0);
for (int i = 0; i < regions.length; i++) {
Region r = createRegion(regions[i], attr.create());
- LogWriterSupport.getLogWriter().info("Server created the region: "+r);
+ LogWriterUtils.getLogWriter().info("Server created the region: "+r);
}
try {
startBridgeServer(port, true);
@@ -290,7 +290,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
public void closeServer(VM server) {
server.invoke(new SerializableRunnable("Close CacheServer") {
public void run() {
- LogWriterSupport.getLogWriter().info("### Close CacheServer. ###");
+ LogWriterUtils.getLogWriter().info("### Close CacheServer. ###");
stopBridgeServer(getCache());
}
});
@@ -308,7 +308,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
SerializableRunnable createQService =
new CacheSerializableRunnable("Create Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
//Region region1 = null;
// Initialize CQ Service.
try {
@@ -332,7 +332,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
}
for (int i=0; i < regions.length; i++) {
createRegion(regions[i], regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + regions[i]);
//region1.getAttributesMutator().setCacheListener(new CqListener());
}
}
@@ -347,11 +347,11 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
SerializableRunnable closeCQService =
new CacheSerializableRunnable("Close Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close Client. ###");
+ LogWriterUtils.getLogWriter().info("### Close Client. ###");
try {
((DefaultQueryService)getCache().getQueryService()).closeCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("### Failed to get CqService during ClientClose() ###");
+ LogWriterUtils.getLogWriter().info("### Failed to get CqService during ClientClose() ###");
}
}
@@ -368,12 +368,12 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
try {
qs = getCache().getQueryService();
}catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("### Failed to get CqService during ClientClose() ###");
+ LogWriterUtils.getLogWriter().info("### Failed to get CqService during ClientClose() ###");
}
try {
qs.createIndex(indexName, IndexType.FUNCTIONAL, indexedExpression, fromClause);
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("### Failed to create Index :" + indexName);
+ LogWriterUtils.getLogWriter().info("### Failed to create Index :" + indexName);
}
}
});
@@ -387,7 +387,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.put(KEY+i, new Portfolio(i, i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -402,7 +402,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
portfolio.createTime = System.currentTimeMillis();
region1.put(KEY+i, portfolio);
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -415,7 +415,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.destroy(KEY+i);
}
- LogWriterSupport.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
}
});
@@ -431,7 +431,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.invalidate(KEY+i);
}
- LogWriterSupport.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries In Region after Delete :" + region1.keys().size());
}
});
@@ -461,7 +461,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
}
for (int i=0; i < servers.length; i++){
- LogWriterSupport.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
+ LogWriterUtils.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
cpf.addServer(servers[i], ports[i]);
}
@@ -478,7 +478,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
//getLogWriter().info("### DEBUG CREATE CQ START ####");
//pause(20 * 1000);
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService qService = null;
try {
@@ -488,7 +488,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -501,7 +501,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("QueryService is :" + qService, err);
+ LogWriterUtils.getLogWriter().info("QueryService is :" + qService, err);
throw err;
}
}
@@ -516,7 +516,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
//getLogWriter().info("### DEBUG CREATE CQ START ####");
//pause(20 * 1000);
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService qService = null;
try {
@@ -526,7 +526,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -539,7 +539,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("QueryService is :" + qService, err);
+ LogWriterUtils.getLogWriter().info("QueryService is :" + qService, err);
throw err;
}
}
@@ -551,10 +551,10 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Create CQ with no name:" ) {
public void run2() throws CacheException {
//pause(60 * 1000);
- LogWriterSupport.getLogWriter().info("### DEBUG CREATE CQ START ####");
+ LogWriterUtils.getLogWriter().info("### DEBUG CREATE CQ START ####");
//pause(20 * 1000);
- LogWriterSupport.getLogWriter().info("### Create CQ with no name. ###");
+ LogWriterUtils.getLogWriter().info("### Create CQ with no name. ###");
// Get CQ Service.
QueryService qService = null;
CqQuery cq1 = null;
@@ -570,7 +570,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
for (int i = 0; i < 20; ++i) {
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
cqf.initCqListeners(cqListeners);
CqAttributes cqa = cqf.create();
@@ -580,38 +580,38 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
cq1 = qService.newCq(queryStr, cqa);
((CqQueryTestListener)cqListeners[0]).cqName = cq1.getName();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CQService is :" + qService);
+ LogWriterUtils.getLogWriter().info("CQService is :" + qService);
Assert.fail("Failed to create CQ with no name" + " . ", ex);
}
if (cq1 == null) {
- LogWriterSupport.getLogWriter().info("Failed to get CqQuery object for CQ with no name.");
+ LogWriterUtils.getLogWriter().info("Failed to get CqQuery object for CQ with no name.");
}
else {
cqName = cq1.getName();
- LogWriterSupport.getLogWriter().info("Created CQ with no name, generated CQ name: " + cqName + " CQ state:" + cq1.getState());
+ LogWriterUtils.getLogWriter().info("Created CQ with no name, generated CQ name: " + cqName + " CQ state:" + cq1.getState());
assertTrue("Create CQ with no name illegal state", cq1.getState().isStopped());
}
if ( i%2 == 0) {
try {
cqResults = cq1.executeWithInitialResults();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + qService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + qService);
Assert.fail("Failed to execute CQ with initial results, cq name: "
+ cqName + " . ", ex);
}
- LogWriterSupport.getLogWriter().info("initial result size = " + cqResults.size());
- LogWriterSupport.getLogWriter().info("CQ state after execute with initial results = " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("initial result size = " + cqResults.size());
+ LogWriterUtils.getLogWriter().info("CQ state after execute with initial results = " + cq1.getState());
assertTrue("executeWithInitialResults() state mismatch", cq1.getState().isRunning());
}
else {
try {
cq1.execute();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CQService is :" + qService);
+ LogWriterUtils.getLogWriter().info("CQService is :" + qService);
Assert.fail("Failed to execute CQ " + cqName + " . ", ex);
}
- LogWriterSupport.getLogWriter().info("CQ state after execute = " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("CQ state after execute = " + cq1.getState());
assertTrue("execute() state mismatch", cq1.getState().isRunning());
}
@@ -619,7 +619,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
try {
cq1.close();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + qService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + qService);
Assert.fail("Failed to close CQ " + cqName + " . ", ex);
}
assertTrue("closeCq() state mismatch", cq1.getState().isClosed());
@@ -648,7 +648,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
private void work() throws CacheException {
//pause(60 * 1000);
- LogWriterSupport.getLogWriter().info("### DEBUG EXECUTE CQ START ####");
+ LogWriterUtils.getLogWriter().info("### DEBUG EXECUTE CQ START ####");
//pause(20 * 1000);
// Get CQ Service.
@@ -668,16 +668,16 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
try {
cq1 = cqService.getCq(cqName);
if (cq1 == null) {
- LogWriterSupport.getLogWriter().info("Failed to get CqQuery object for CQ name: " + cqName);
+ LogWriterUtils.getLogWriter().info("Failed to get CqQuery object for CQ name: " + cqName);
fail("Failed to get CQ " + cqName);
}
else {
- LogWriterSupport.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
+ LogWriterUtils.getLogWriter().info("Obtained CQ, CQ name: " + cq1.getName());
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
}
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
- LogWriterSupport.getLogWriter().error(ex);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().error(ex);
AssertionError err = new AssertionError("Failed to execute CQ " + cqName);
err.initCause(ex);
throw err;
@@ -689,13 +689,13 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
try {
cqResults = cq1.executeWithInitialResults();
} catch (Exception ex){
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService);
ex.printStackTrace();
AssertionError err = new AssertionError("Failed to execute CQ " + cqName);
err.initCause(ex);
throw err;
}
- LogWriterSupport.getLogWriter().info("initial result size = " + cqResults.size());
+ LogWriterUtils.getLogWriter().info("initial result size = " + cqResults.size());
assertTrue("executeWithInitialResults() state mismatch", cq1.getState().isRunning());
if (expectedResultsSize >= 0) {
assertEquals("Unexpected results size for CQ: " + cqName +
@@ -725,7 +725,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
AssertionError err = new AssertionError("Failed to execute CQ " + cqName);
err.initCause(ex);
if (expectedErr == null) {
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
}
throw err;
}
@@ -755,7 +755,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
public void stopCQ(VM vm, final String cqName) throws Exception {
vm.invoke(new CacheSerializableRunnable("Stop CQ :" + cqName) {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Stop CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Stop CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -783,7 +783,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Stop CQ :" + cqName) {
public void run2() throws CacheException {
CqQuery cq1 = null;
- LogWriterSupport.getLogWriter().info("### Stop and Exec CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Stop and Exec CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -807,8 +807,8 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
Assert.fail("Count = " + i + "Failed to stop CQ " + cqName + " . ", ex);
}
assertTrue("Stop CQ state mismatch, count = " + i, cq1.getState().isStopped());
- LogWriterSupport.getLogWriter().info("After stop in Stop and Execute loop, ran successfully, loop count: " + i);
- LogWriterSupport.getLogWriter().info("CQ state: " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("After stop in Stop and Execute loop, ran successfully, loop count: " + i);
+ LogWriterUtils.getLogWriter().info("CQ state: " + cq1.getState());
// Re-execute CQ
try {
@@ -817,8 +817,8 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
Assert.fail("Count = " + i + "Failed to execute CQ " + cqName + " . ", ex);
}
assertTrue("Execute CQ state mismatch, count = " + i, cq1.getState().isRunning());
- LogWriterSupport.getLogWriter().info("After execute in Stop and Execute loop, ran successfully, loop count: " + i);
- LogWriterSupport.getLogWriter().info("CQ state: " + cq1.getState());
+ LogWriterUtils.getLogWriter().info("After execute in Stop and Execute loop, ran successfully, loop count: " + i);
+ LogWriterUtils.getLogWriter().info("CQ state: " + cq1.getState());
}
}
});
@@ -829,7 +829,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
public void closeCQ(VM vm, final String cqName) throws Exception {
vm.invoke(new CacheSerializableRunnable("Close CQ :" + cqName) {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Close CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -860,7 +860,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
Region region = null;
try {
region = getRootRegion().getSubregion(regionName);
- region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterSupport.getLogWriter()));
+ region.getAttributesMutator().setCacheListener(new CertifiableTestCacheListener(LogWriterUtils.getLogWriter()));
} catch (Exception cqe) {
AssertionError err = new AssertionError("Failed to get Region.");
err.initCause(cqe);
@@ -919,7 +919,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
private void failIfCQExists(VM vm, final String cqName) {
vm.invoke(new CacheSerializableRunnable("Fail if CQ exists") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Fail if CQ Exists. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Fail if CQ Exists. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -941,7 +941,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -990,7 +990,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
final int totalEvents) {
vm.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ. ### " + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1217,7 +1217,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
public void validateQuery(VM vm, final String query, final int resultSize) {
vm.invoke(new CacheSerializableRunnable("Validate Query") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating Query. ###");
+ LogWriterUtils.getLogWriter().info("### Validating Query. ###");
QueryService qs = getCache().getQueryService();
Query q = qs.newQuery(query);
@@ -1225,7 +1225,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
Object r = q.execute();
if(r instanceof Collection){
int rSize = ((Collection)r).size();
- LogWriterSupport.getLogWriter().info("### Result Size is :" + rSize);
+ LogWriterUtils.getLogWriter().info("### Result Size is :" + rSize);
assertEquals(rSize, resultSize);
}
}
@@ -1276,7 +1276,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
vm.invoke(new CacheSerializableRunnable("Stop CQ :" + cqName) {
public void run2() throws CacheException {
CqQuery cq1 = null;
- LogWriterSupport.getLogWriter().info("### CQ attributes mutator for ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### CQ attributes mutator for ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1346,7 +1346,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testInterestListAndCQs";
createPool(client, poolName, host0, thePort);
@@ -1538,7 +1538,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQStopExecute";
createPool(client, poolName, host0, thePort);
@@ -1625,7 +1625,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQAttributesMutator";
createPool(client, poolName, host0, thePort);
@@ -1723,7 +1723,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQCreateClose";
System.out.println("##### Pool Name :" + poolName + " host :" + host0 + " port :" + thePort);
@@ -1822,13 +1822,13 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
client.invoke(new CacheSerializableRunnable("CloseAll CQ :") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close All CQ. ###");
+ LogWriterUtils.getLogWriter().info("### Close All CQ. ###");
// Get CQ Service.
QueryService cqService = null;
try {
cqService = getCache().getQueryService();
} catch (Exception cqe) {
- LogWriterSupport.getLogWriter().info("Failed to getCQService.", cqe);
+ LogWriterUtils.getLogWriter().info("Failed to getCQService.", cqe);
Assert.fail("Failed to getCQService.", cqe);
}
@@ -1836,7 +1836,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
try {
cqService.closeCqs();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to close All CQ.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to close All CQ.", ex);
Assert.fail("Failed to close All CQ. ", ex);
}
}
@@ -1856,7 +1856,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
// Call close all CQ.
client.invoke(new CacheSerializableRunnable("CloseAll CQ 2 :") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close All CQ 2. ###");
+ LogWriterUtils.getLogWriter().info("### Close All CQ 2. ###");
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1892,7 +1892,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
/* Init Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testRegionDestroy";
createPool(client, poolName, host0, thePort);
@@ -1997,7 +1997,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
/* Create Server and Client */
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName1 = "testCQWithMultipleClients1";
String poolName2 = "testCQWithMultipleClients2";
@@ -2150,7 +2150,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQResultSet";
createPool(client, poolName, host0, thePort);
@@ -2224,7 +2224,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQEvents";
createPool(client, poolName, host0, thePort);
@@ -2329,7 +2329,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQEvents";
createPool(client, poolName, host0, thePort);
@@ -2431,7 +2431,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testEnableDisableCQ";
createPool(client, poolName, host0, thePort);
@@ -2572,7 +2572,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testQuery";
createPool(client, poolName, host0, thePort);
@@ -2617,7 +2617,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
// Create client.
// Properties props = new Properties();
// Create client with redundancyLevel -1
@@ -2711,7 +2711,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(2);
@@ -2811,7 +2811,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server1);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
@@ -2848,7 +2848,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
server2.invoke(new CacheSerializableRunnable("Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setMirrorType(MirrorType.KEYS_VALUES);
@@ -2943,7 +2943,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server2);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int thePort2 = server2.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
@@ -3003,7 +3003,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
createServer(server);
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
// createClient(client, thePort, host0);
@@ -3078,7 +3078,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
final int thePort = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testQueryWithNULLInWhereClause";
createPool(client, poolName, host0, thePort);
@@ -3133,7 +3133,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
// Create region with Global scope
AttributesFactory factory1 = new AttributesFactory();
@@ -3167,7 +3167,7 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int thePort2 = server2.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
@@ -3234,14 +3234,14 @@ public class CqQueryUsingPoolDUnitTest extends CacheTestCase {
server.invoke(new CacheSerializableRunnable("Server Region Entries") {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("### Entries in Server :" + region.keys().size());
+ LogWriterUtils.getLogWriter().info("### Entries in Server :" + region.keys().size());
}
});
client.invoke(new CacheSerializableRunnable("Client Region Entries") {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("### Entries in Client :" + region.keys().size());
+ LogWriterUtils.getLogWriter().info("### Entries in Client :" + region.keys().size());
}
});
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolDUnitTest.java
index 7690f32..7104947 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolDUnitTest.java
@@ -35,8 +35,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -161,7 +161,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCqResults";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -216,7 +216,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCqResults";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -261,7 +261,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCqResults";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -320,7 +320,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCqResults";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -377,7 +377,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCqResults";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -426,7 +426,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCqResults";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -495,7 +495,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCqResults";
final String cqName = "testCqResultsP_0";
@@ -558,7 +558,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -581,7 +581,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
if (cqQuery.getName().equals(cqName)) {
int size = cqQuery.getCqResultKeysSize();
if (size != totalObjects) {
- LogWriterSupport.getLogWriter().info("The number of Cached events " + size +
+ LogWriterUtils.getLogWriter().info("The number of Cached events " + size +
" is not equal to the expected size " + totalObjects);
HashSet expectedKeys = new HashSet();
for (int i = 1; i < totalObjects; i++) {
@@ -589,7 +589,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
}
Set cachedKeys = cqQuery.getCqResultKeyCache();
expectedKeys.removeAll(cachedKeys);
- LogWriterSupport.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
+ LogWriterUtils.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
}
assertEquals("The number of keys cached for cq " + cqName + " is wrong.",
totalObjects, cqQuery.getCqResultKeysSize());
@@ -619,7 +619,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCqResults";
final String cqName1 = "testCqResultsP_0";
@@ -687,7 +687,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -709,7 +709,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
ServerCQImpl cqQuery = (ServerCQImpl)cq;
int size = cqQuery.getCqResultKeysSize();
if (size != totalObjects) {
- LogWriterSupport.getLogWriter().info("The number of Cached events " + size +
+ LogWriterUtils.getLogWriter().info("The number of Cached events " + size +
" is not equal to the expected size " + totalObjects);
HashSet expectedKeys = new HashSet();
for (int i = 1; i < totalObjects; i++) {
@@ -717,7 +717,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
}
Set cachedKeys = cqQuery.getCqResultKeyCache();
expectedKeys.removeAll(cachedKeys);
- LogWriterSupport.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
+ LogWriterUtils.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
}
assertEquals("The number of keys cached for cq " + cqQuery.getName() + " is wrong.",
totalObjects, cqQuery.getCqResultKeysSize());
@@ -748,7 +748,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCqResults";
final String cqName = "testCqResultsP_0";
@@ -811,7 +811,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -861,7 +861,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(CqQueryUsingPoolDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
String poolName = "testCqResults";
final String cqName = "testCqResultsCachingForDestroyEventsOnPR_0";
@@ -922,7 +922,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -942,7 +942,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -978,7 +978,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
cqDUnitTest.createServer(server1);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
String poolName = "testCQFailOver";
@@ -1039,7 +1039,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -1062,7 +1062,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
if (cqQuery.getName().equals(cqName)) {
int size = cqQuery.getCqResultKeysSize();
if (size != totalObjects) {
- LogWriterSupport.getLogWriter().info("The number of Cached events " + size +
+ LogWriterUtils.getLogWriter().info("The number of Cached events " + size +
" is not equal to the expected size " + totalObjects);
HashSet expectedKeys = new HashSet();
for (int i = 1; i < totalObjects; i++) {
@@ -1070,7 +1070,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
}
Set cachedKeys = cqQuery.getCqResultKeyCache();
expectedKeys.removeAll(cachedKeys);
- LogWriterSupport.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
+ LogWriterUtils.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
}
assertEquals("The number of keys cached for cq " + cqName + " is wrong.",
totalObjects, cqQuery.getCqResultKeysSize());
@@ -1096,7 +1096,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
try {
cqService = ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqService.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqService.", ex);
Assert.fail ("Failed to get the internal CqService.", ex);
}
@@ -1119,7 +1119,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
if (cqQuery.getName().equals(cqName)) {
int size = cqQuery.getCqResultKeysSize();
if (size != totalObjects) {
- LogWriterSupport.getLogWriter().info("The number of Cached events " + size +
+ LogWriterUtils.getLogWriter().info("The number of Cached events " + size +
" is not equal to the expected size " + totalObjects);
HashSet expectedKeys = new HashSet();
for (int i = 1; i < totalObjects; i++) {
@@ -1127,7 +1127,7 @@ public class CqResultSetUsingPoolDUnitTest extends CacheTestCase {
}
Set cachedKeys = cqQuery.getCqResultKeyCache();
expectedKeys.removeAll(cachedKeys);
- LogWriterSupport.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
+ LogWriterUtils.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
}
assertEquals("The number of keys cached for cq " + cqName + " is wrong.",
totalObjects, cqQuery.getCqResultKeysSize());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolOptimizedExecuteDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolOptimizedExecuteDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolOptimizedExecuteDUnitTest.java
index 430e18d..facb3d9 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolOptimizedExecuteDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqResultSetUsingPoolOptimizedExecuteDUnitTest.java
@@ -33,8 +33,8 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -80,7 +80,7 @@ public class CqResultSetUsingPoolOptimizedExecuteDUnitTest extends CqResultSetUs
cqDUnitTest.createServer(server1);
final int port1 = server1.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(1);
String poolName = "testCQFailOver";
@@ -141,7 +141,7 @@ public class CqResultSetUsingPoolOptimizedExecuteDUnitTest extends CqResultSetUs
try {
CqServiceImpl = (com.gemstone.gemfire.cache.query.internal.cq.CqServiceImpl) ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqServiceImpl.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqServiceImpl.", ex);
Assert.fail ("Failed to get the internal CqServiceImpl.", ex);
}
@@ -164,7 +164,7 @@ public class CqResultSetUsingPoolOptimizedExecuteDUnitTest extends CqResultSetUs
if (cqQuery.getName().equals(cqName)) {
int size = cqQuery.getCqResultKeysSize();
if (size != totalObjects) {
- LogWriterSupport.getLogWriter().info("The number of Cached events " + size +
+ LogWriterUtils.getLogWriter().info("The number of Cached events " + size +
" is not equal to the expected size " + totalObjects);
HashSet expectedKeys = new HashSet();
for (int i = 1; i < totalObjects; i++) {
@@ -172,7 +172,7 @@ public class CqResultSetUsingPoolOptimizedExecuteDUnitTest extends CqResultSetUs
}
Set cachedKeys = cqQuery.getCqResultKeyCache();
expectedKeys.removeAll(cachedKeys);
- LogWriterSupport.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
+ LogWriterUtils.getLogWriter().info("Missing keys from the Cache : " + expectedKeys);
}
assertEquals("The number of keys cached for cq " + cqName + " is wrong.",
totalObjects, cqQuery.getCqResultKeysSize());
@@ -198,7 +198,7 @@ public class CqResultSetUsingPoolOptimizedExecuteDUnitTest extends CqResultSetUs
try {
CqServiceImpl = (CqServiceImpl) ((DefaultQueryService)getCache().getQueryService()).getCqService();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("Failed to get the internal CqServiceImpl.", ex);
+ LogWriterUtils.getLogWriter().info("Failed to get the internal CqServiceImpl.", ex);
Assert.fail ("Failed to get the internal CqServiceImpl.", ex);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStateDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStateDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStateDUnitTest.java
index d59f359..7c1e8f2 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStateDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStateDUnitTest.java
@@ -24,10 +24,9 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -60,7 +59,7 @@ public class CqStateDUnitTest extends HelperTestCase {
startCacheServer(serverA, ports[0], getAuthenticatedServerProperties());
createReplicatedRegion(serverA, regionName, null);
- final String host0 = NetworkSupport.getServerHostName(serverA.getHost());
+ final String host0 = NetworkUtils.getServerHostName(serverA.getHost());
startClient(client, new VM[]{ serverA, serverB }, ports, 1, getClientProperties());
createCQ(client, cqName, "select * from /"+ regionName, null);
@@ -70,7 +69,7 @@ public class CqStateDUnitTest extends HelperTestCase {
startCacheServers(serverB);
AsyncInvocation async = executeCQ(client, cqName);
- Threads.join(async, 10000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 10000);
Boolean clientRunning = (Boolean) client.invoke(new SerializableCallable() {
@Override
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsDUnitTest.java
index dcb5d25..a006f46 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsDUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -78,7 +78,7 @@ public class CqStatsDUnitTest extends CacheTestCase {
final int cqListenerInvocations) {
vm.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ Stats. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ Stats. ### " + cqName);
// Get CQ Service.
QueryService qService = null;
try {
@@ -161,7 +161,7 @@ public class CqStatsDUnitTest extends CacheTestCase {
final int clientsWithCqs) {
vm.invoke(new CacheSerializableRunnable("Validate CQ Service Stats") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ Service Stats. ### ");
+ LogWriterUtils.getLogWriter().info("### Validating CQ Service Stats. ### ");
// Get CQ Service.
QueryService qService = null;
try {
@@ -249,7 +249,7 @@ public class CqStatsDUnitTest extends CacheTestCase {
/* Init Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
cqDUnitTest.createClient(client, port, host0);
/* Create CQs. */
@@ -340,7 +340,7 @@ public class CqStatsDUnitTest extends CacheTestCase {
/* Init Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
cqDUnitTest.createClient(client1, port, host0);
cqDUnitTest.createClient(client2, port, host0);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsUsingPoolDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsUsingPoolDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsUsingPoolDUnitTest.java
index 305b8e6..869ea81 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsUsingPoolDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/CqStatsUsingPoolDUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -78,7 +78,7 @@ public class CqStatsUsingPoolDUnitTest extends CacheTestCase {
final int cqListenerInvocations) {
vm.invoke(new CacheSerializableRunnable("Validate CQs") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ Stats. ### " + cqName);
+ LogWriterUtils.getLogWriter().info("### Validating CQ Stats. ### " + cqName);
// Get CQ Service.
QueryService qService = null;
try {
@@ -161,7 +161,7 @@ public class CqStatsUsingPoolDUnitTest extends CacheTestCase {
final int clientsWithCqs) {
vm.invoke(new CacheSerializableRunnable("Validate CQ Service Stats") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Validating CQ Service Stats. ### ");
+ LogWriterUtils.getLogWriter().info("### Validating CQ Service Stats. ### ");
// Get CQ Service.
QueryService qService = null;
try {
@@ -249,7 +249,7 @@ public class CqStatsUsingPoolDUnitTest extends CacheTestCase {
/* Init Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName = "testCQStatistics";
cqDUnitTest.createPool(client, poolName, host0, port);
@@ -344,7 +344,7 @@ public class CqStatsUsingPoolDUnitTest extends CacheTestCase {
/* Init Server and Client */
cqDUnitTest.createServer(server);
final int port = server.invokeInt(CqQueryUsingPoolDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
String poolName1 = "testCQServiceStatistics1";
String poolName2 = "testCQServiceStatistics2";
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryDUnitTest.java
index 3585de2..c9aa564 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryDUnitTest.java
@@ -48,8 +48,8 @@ import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -129,7 +129,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
// create client
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
@@ -191,7 +191,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
// create client
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
@@ -304,7 +304,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
// create client
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
@@ -395,7 +395,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
// creating an accessor vm with Bridge Server installed.
createServer(server1);
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
@@ -493,7 +493,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
// create client
final int port = server2.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server2.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server2.getHost());
createClient(client, port, host0);
@@ -588,7 +588,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
@@ -684,7 +684,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
@@ -780,7 +780,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
createClient(client2, port, host0);
@@ -882,7 +882,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
Wait.pause(2000);
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
createClient(client2, port, host0);
@@ -1023,7 +1023,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
createClient(client, port, host0);
createClient(client2, port, host0);
@@ -1241,7 +1241,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
createServer(server2);
final int port = server1.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server1.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server1.getHost());
// Initialize Client.
createClient(client, port, host0);
@@ -1374,7 +1374,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
createServerWithoutRootRegion(server, 0, false, 0);
final int port = server.invokeInt(PartitionedRegionCqQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Initialize Client.
createCacheClient(client1, port, host0);
@@ -1499,7 +1499,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
//AttributesFactory factory = new AttributesFactory();
//factory.setScope(Scope.DISTRIBUTED_ACK);
//factory.setMirrorType(MirrorType.KEYS_VALUES);
@@ -1518,7 +1518,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
//assertTrue(getSystem().getDistributionManager().getOtherDistributionManagerIds().size() > 0);
for (int i = 0; i < regions.length; i++) {
Region r = createRegion(regions[i], attr.create());
- LogWriterSupport.getLogWriter().info("Server created the region: "+r);
+ LogWriterUtils.getLogWriter().info("Server created the region: "+r);
}
// pause(2000);
try {
@@ -1548,7 +1548,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
"Create Cache Server") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Create Cache Server. ###");
+ LogWriterUtils.getLogWriter().info("### Create Cache Server. ###");
//AttributesFactory factory = new AttributesFactory();
//factory.setScope(Scope.DISTRIBUTED_ACK);
//factory.setMirrorType(MirrorType.KEYS_VALUES);
@@ -1567,7 +1567,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
//assertTrue(getSystem().getDistributionManager().getOtherDistributionManagerIds().size() > 0);
for (int i = 0; i < regions.length; i++) {
Region r = createRegionWithoutRoot(regions[i], attr.create());
- LogWriterSupport.getLogWriter().info("Server created the region: "+r);
+ LogWriterUtils.getLogWriter().info("Server created the region: "+r);
}
// pause(2000);
try {
@@ -1618,8 +1618,8 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
SerializableRunnable createQService =
new CacheSerializableRunnable("Create Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Create Client. ###");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("### Create Client. ###");
+ LogWriterUtils.getLogWriter().info(
"Will connect to server at por: " + serverPorts[0] + " and at host : "
+ serverHost);
//Region region1 = null;
@@ -1642,7 +1642,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
for (int i=0; i < regions.length; i++) {
Region clientRegion = createRegion(regions[i], regionFactory.createRegionAttributes());
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + clientRegion);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + clientRegion);
//region1.getAttributesMutator().setCacheListener(new CqListener());
}
}
@@ -1658,7 +1658,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
//getLogWriter().info("### DEBUG CREATE CQ START ####");
//pause(20 * 1000);
- LogWriterSupport.getLogWriter().info("### Create CQ. ###" + cqName);
+ LogWriterUtils.getLogWriter().info("### Create CQ. ###" + cqName);
// Get CQ Service.
QueryService cqService = null;
try {
@@ -1669,7 +1669,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
}
// Create CQ Attributes.
CqAttributesFactory cqf = new CqAttributesFactory();
- CqListener[] cqListeners = {new CqQueryTestListener(LogWriterSupport.getLogWriter())};
+ CqListener[] cqListeners = {new CqQueryTestListener(LogWriterUtils.getLogWriter())};
((CqQueryTestListener)cqListeners[0]).cqName = cqName;
cqf.initCqListeners(cqListeners);
@@ -1679,11 +1679,11 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
try {
CqQuery cq1 = cqService.newCq(cqName, queryStr, cqa);
assertTrue("newCq() state mismatch", cq1.getState().isStopped());
- LogWriterSupport.getLogWriter().info("Created a new CqQuery : "+cq1);
+ LogWriterUtils.getLogWriter().info("Created a new CqQuery : "+cq1);
} catch (Exception ex){
AssertionError err = new AssertionError("Failed to create CQ " + cqName + " . ");
err.initCause(ex);
- LogWriterSupport.getLogWriter().info("CqService is :" + cqService, err);
+ LogWriterUtils.getLogWriter().info("CqService is :" + cqService, err);
throw err;
}
}
@@ -1722,7 +1722,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
for (int i = 1; i <= size; i++) {
region1.put(KEY+i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
}
@@ -1749,13 +1749,13 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
public void createCacheClient(VM vm, final String[] serverHosts, final int[] serverPorts, final String redundancyLevel) {
vm.invoke(new CacheSerializableRunnable("createCacheClient") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Will connect to server at por: " + serverPorts[0] + " and at host : "
+ serverHosts[0]);
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer(serverHosts[0]/*getServerHostName(Host.getHost(0))*/, serverPorts[0]);
ccf.setPoolSubscriptionEnabled(true);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
// Create Client Cache.
getClientCache(ccf);
@@ -1781,7 +1781,7 @@ public class PartitionedRegionCqQueryDUnitTest extends CacheTestCase {
for (int i=0; i < regions.length; i++) {
Region clientRegion = ((ClientCache)getCache()).createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
.create(regions[i]);
- LogWriterSupport.getLogWriter().info("### Successfully Created Region on Client :" + clientRegion);
+ LogWriterUtils.getLogWriter().info("### Successfully Created Region on Client :" + clientRegion);
//region1.getAttributesMutator().setCacheListener(new CqListener());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryOptimizedExecuteDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryOptimizedExecuteDUnitTest.java b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryOptimizedExecuteDUnitTest.java
index 97cfd7c..659bf12 100644
--- a/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryOptimizedExecuteDUnitTest.java
+++ b/gemfire-cq/src/test/java/com/gemstone/gemfire/cache/query/cq/dunit/PartitionedRegionCqQueryOptimizedExecuteDUnitTest.java
@@ -25,8 +25,8 @@ import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -68,7 +68,7 @@ public class PartitionedRegionCqQueryOptimizedExecuteDUnitTest extends Partition
final int thePort = server.invokeInt(PartitionedRegionCqQueryOptimizedExecuteDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -95,7 +95,7 @@ public class PartitionedRegionCqQueryOptimizedExecuteDUnitTest extends Partition
for (int i = numOfEntries+1; i <= numOfEntries*2; i++) {
region1.put(KEY+i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
@@ -166,7 +166,7 @@ public class PartitionedRegionCqQueryOptimizedExecuteDUnitTest extends Partition
final int thePort = server.invokeInt(PartitionedRegionCqQueryDUnitTest.class,
"getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server.getHost());
// Create client.
createClient(client, thePort, host0);
@@ -194,7 +194,7 @@ public class PartitionedRegionCqQueryOptimizedExecuteDUnitTest extends Partition
for (int i = numOfEntries+1; i <= numOfEntries*2; i++) {
region1.put(KEY+i, new Portfolio(i));
}
- LogWriterSupport.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
+ LogWriterUtils.getLogWriter().info("### Number of Entries in Region :" + region1.keys().size());
}
});
[32/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxStringQueryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxStringQueryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxStringQueryDUnitTest.java
index 60cd4d0..42459c9 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxStringQueryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/PdxStringQueryDUnitTest.java
@@ -66,8 +66,8 @@ import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -170,7 +170,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
final int port1 = server1.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server0.getHost());
// Create client pool.
final String poolName = "testClientServerQueryPool";
@@ -183,7 +183,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
factory.setScope(Scope.LOCAL);
ClientServerTestCase.configureConnectionPool(factory, host0, port1,-1, true, -1, -1, null);
Region region = createRegion(regionName, rootRegionName, factory.create());
- LogWriterSupport.getLogWriter().info("Put PortfolioPdx");
+ LogWriterUtils.getLogWriter().info("Put PortfolioPdx");
for (int i=0; i<numberOfEntries; i++) {
region.put("key-"+i, new PortfolioPdx(i));
}
@@ -210,35 +210,35 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
Query query = remoteQueryService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute();
resWithoutIndexRemote[i] = rs[0][0];
- LogWriterSupport.getLogWriter().info("RR remote indexType: no index size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR remote indexType: no index size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][0].asList(), queryString[i]);
- LogWriterSupport.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
query = localQueryService.newQuery(queryString[i]);
rs[0][1] = (SelectResults)query.execute();
resWithoutIndexLocal[i] = rs[0][1];
- LogWriterSupport.getLogWriter().info("RR client local indexType:no index size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR client local indexType:no index size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][1].asList(), queryString[i]);
} catch (Exception e) {
Assert.fail("Failed executing " + queryString[i], e);
}
try{
// to compare remote query results with and without index
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server for region2:" + queryString2[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server for region2:" + queryString2[i]);
Query query = remoteQueryService.newQuery(queryString2[i]);
resWithIndexRemote[i] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR remote region2 size of resultset: "+ resWithIndexRemote[i].size() + " for query: " + queryString2[i]);;
+ LogWriterUtils.getLogWriter().info("RR remote region2 size of resultset: "+ resWithIndexRemote[i].size() + " for query: " + queryString2[i]);;
checkForPdxString(resWithIndexRemote[i].asList(), queryString2[i]);
// to compare local query results with and without index
- LogWriterSupport.getLogWriter().info("### Executing Query on local for region2:" + queryString2[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on local for region2:" + queryString2[i]);
query = localQueryService.newQuery(queryString2[i]);
resWithIndexLocal[i] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR local region2 size of resultset: "+ resWithIndexLocal[i].size() + " for query: " + queryString2[i]);;
+ LogWriterUtils.getLogWriter().info("RR local region2 size of resultset: "+ resWithIndexLocal[i].size() + " for query: " + queryString2[i]);;
checkForPdxString(resWithIndexLocal[i].asList(), queryString2[i]);
} catch (Exception e) {
Assert.fail("Failed executing " + queryString2[i], e);
@@ -295,7 +295,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("Put Objects locally on server");
+ LogWriterUtils.getLogWriter().info("Put Objects locally on server");
for (int i=numberOfEntries; i<numberOfEntries*2; i++) {
region.put("key-"+i, new Portfolio(i));
}
@@ -304,9 +304,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local indexType: no size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server local indexType: no size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
@@ -315,7 +315,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
}
try{
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString2[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local indexType: no size of resultset: " + rs.size() + " for query: " + queryString2[i]);
+ LogWriterUtils.getLogWriter().info("RR server local indexType: no size of resultset: " + rs.size() + " for query: " + queryString2[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString2[i]);
}catch (Exception e) {
@@ -335,9 +335,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("isPR: false server local readSerializedTrue: indexType: false size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("isPR: false server local readSerializedTrue: indexType: false size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -357,9 +357,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 remotely to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) remoteQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server remote readSerializedTrue: indexType: false size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server remote readSerializedTrue: indexType: false size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -428,7 +428,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
final int port1 = server1.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server0.getHost());
// Create client pool.
final String poolName = "testClientServerQueryPool";
@@ -441,7 +441,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
factory.setScope(Scope.LOCAL);
ClientServerTestCase.configureConnectionPool(factory, host0, port1,-1, true, -1, -1, null);
Region region = createRegion(regionName, rootRegionName, factory.create());
- LogWriterSupport.getLogWriter().info("Put PortfolioPdx");
+ LogWriterUtils.getLogWriter().info("Put PortfolioPdx");
for (int i=0; i<numberOfEntries; i++) {
region.put("key-"+i, new PortfolioPdx(i));
}
@@ -482,16 +482,16 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
Query query = remoteQueryService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR remote indexType: CompactRange size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR remote indexType: CompactRange size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][0].asList(), queryString[i]);
- LogWriterSupport.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
query = localQueryService.newQuery(queryString[i]);
rs[0][1] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR client local indexType: CompactRange size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR client local indexType: CompactRange size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][1].asList(), queryString[i]);
if(i < orderByQueryIndex){
@@ -519,7 +519,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("Put Objects locally on server");
+ LogWriterUtils.getLogWriter().info("Put Objects locally on server");
for (int i=numberOfEntries; i<numberOfEntries*2; i++) {
region.put("key-"+i, new Portfolio(i));
}
@@ -528,9 +528,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -549,9 +549,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local readSerializedTrue: indexType: CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server local readSerializedTrue: indexType: CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -571,9 +571,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 remotely to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) remoteQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server remote readSerializedTrue: indexType:CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server remote readSerializedTrue: indexType:CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -640,7 +640,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
final int port0 = server0.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port1 = server1.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server0.getHost());
// Create client pool.
final String poolName = "testClientServerQueryPool";
@@ -653,7 +653,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
factory.setScope(Scope.LOCAL);
ClientServerTestCase.configureConnectionPool(factory, host0, port1,-1, true, -1, -1, null);
Region region = createRegion(regionName, rootRegionName, factory.create());
- LogWriterSupport.getLogWriter().info("Put PortfolioPdx");
+ LogWriterUtils.getLogWriter().info("Put PortfolioPdx");
for (int i=0; i<numberOfEntries; i++) {
region.put("key-"+i, new PortfolioPdx(i));
}
@@ -690,16 +690,16 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
Query query = remoteQueryService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR remote indexType: Range size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR remote indexType: Range size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][0].asList(), queryString[i]);
- LogWriterSupport.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
query = localQueryService.newQuery(queryString[i]);
rs[0][1] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR client local indexType: Range size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR client local indexType: Range size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][1].asList(), queryString[i]);
if(i < orderByQueryIndex){
@@ -726,7 +726,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("Put Objects locally on server");
+ LogWriterUtils.getLogWriter().info("Put Objects locally on server");
for (int i=numberOfEntries; i<numberOfEntries*2; i++) {
region.put("key-"+i, new Portfolio(i));
}
@@ -735,9 +735,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -755,9 +755,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server local readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -775,9 +775,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 remotely to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) remoteQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server remote readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server remote readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -854,7 +854,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
final int port1 = server1.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server0.getHost());
// Create client pool.
final String poolName = "testClientServerQueryPool";
@@ -867,7 +867,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
factory.setScope(Scope.LOCAL);
ClientServerTestCase.configureConnectionPool(factory, host0, port1,-1, true, -1, -1, null);
Region region = createRegion(regionName, rootRegionName, factory.create());
- LogWriterSupport.getLogWriter().info("Put PortfolioPdx");
+ LogWriterUtils.getLogWriter().info("Put PortfolioPdx");
for (int i=0; i<numberOfEntries; i++) {
region.put("key-"+i, new PortfolioPdx(i));
}
@@ -894,35 +894,35 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
Query query = remoteQueryService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute();
resWithoutIndexRemote[i] = rs[0][0];
- LogWriterSupport.getLogWriter().info("RR remote no index size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR remote no index size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][0].asList(), queryString[i]);
- LogWriterSupport.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
query = localQueryService.newQuery(queryString[i]);
rs[0][1] = (SelectResults)query.execute();
resWithoutIndexLocal[i] = rs[0][1];
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " client local indexType:no index size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " client local indexType:no index size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][1].asList(), queryString[i]);
} catch (Exception e) {
Assert.fail("Failed executing " + queryString[i], e);
}
try{
// to compare remote query results with and without index
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server for region2:" + queryString2[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server for region2:" + queryString2[i]);
Query query = remoteQueryService.newQuery(queryString2[i]);
resWithIndexRemote[i] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " remote region2 size of resultset: "+ resWithIndexRemote[i].size() + " for query: " + queryString2[i]);;
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " remote region2 size of resultset: "+ resWithIndexRemote[i].size() + " for query: " + queryString2[i]);;
checkForPdxString(resWithIndexRemote[i].asList(), queryString2[i]);
// to compare local query results with and without index
- LogWriterSupport.getLogWriter().info("### Executing Query on local for region2:" + queryString2[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on local for region2:" + queryString2[i]);
query = localQueryService.newQuery(queryString2[i]);
resWithIndexLocal[i] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " local region2 size of resultset: "+ resWithIndexLocal[i].size() + " for query: " + queryString2[i]);;
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " local region2 size of resultset: "+ resWithIndexLocal[i].size() + " for query: " + queryString2[i]);;
checkForPdxString(resWithIndexLocal[i].asList(), queryString2[i]);
} catch (Exception e) {
Assert.fail("Failed executing " + queryString2[i], e);
@@ -931,8 +931,8 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
if(i < orderByQueryIndex){
// Compare local and remote query results.
if (!compareResultsOfWithAndWithoutIndex(rs)){
- LogWriterSupport.getLogWriter().info("result0="+rs[0][0].asList());
- LogWriterSupport.getLogWriter().info("result1="+rs[0][1].asList());
+ LogWriterUtils.getLogWriter().info("result0="+rs[0][0].asList());
+ LogWriterUtils.getLogWriter().info("result1="+rs[0][1].asList());
fail("Local and Remote Query Results are not matching for query :" + queryString[i]);
}
}
@@ -980,7 +980,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("Put Objects locally on server");
+ LogWriterUtils.getLogWriter().info("Put Objects locally on server");
for (int i=numberOfEntries; i<numberOfEntries*2; i++) {
region.put("key-"+i, new Portfolio(i));
}
@@ -989,9 +989,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("PR server local indexType:no size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("PR server local indexType:no size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -999,7 +999,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
}
try{
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString2[i]).execute();
- LogWriterSupport.getLogWriter().info("PR server local indexType: no size of resultset: " + rs.size() + " for query: " + queryString2[i]);
+ LogWriterUtils.getLogWriter().info("PR server local indexType: no size of resultset: " + rs.size() + " for query: " + queryString2[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString2[i]);
}catch (Exception e) {
@@ -1019,9 +1019,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " server local readSerializedTrue: indexType: no index size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " server local readSerializedTrue: indexType: no index size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1040,9 +1040,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 remotely to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) remoteQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server remote readSerializedTrue: indexType:no index size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server remote readSerializedTrue: indexType:no index size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1118,7 +1118,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
final int port1 = server1.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server0.getHost());
// Create client pool.
final String poolName = "testClientServerQueryPool";
@@ -1131,7 +1131,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
factory.setScope(Scope.LOCAL);
ClientServerTestCase.configureConnectionPool(factory, host0, port1,-1, true, -1, -1, null);
Region region = createRegion(regionName, rootRegionName, factory.create());
- LogWriterSupport.getLogWriter().info("Put PortfolioPdx");
+ LogWriterUtils.getLogWriter().info("Put PortfolioPdx");
for (int i=0; i<numberOfEntries; i++) {
region.put("key-"+i, new PortfolioPdx(i));
}
@@ -1179,16 +1179,16 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
Query query = remoteQueryService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR remote indexType:CompactRange size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR remote indexType:CompactRange size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][0].asList(), queryString[i]);
- LogWriterSupport.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
query = localQueryService.newQuery(queryString[i]);
rs[0][1] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " client local indexType:CompactRange size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " client local indexType:CompactRange size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][1].asList(), queryString[i]);
if(i < orderByQueryIndex){
@@ -1214,7 +1214,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("Put Objects locally on server");
+ LogWriterUtils.getLogWriter().info("Put Objects locally on server");
for (int i=numberOfEntries; i<numberOfEntries*2; i++) {
region.put("key-"+i, new Portfolio(i));
}
@@ -1223,9 +1223,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1245,9 +1245,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " server local readSerializedTrue: indexType:CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " server local readSerializedTrue: indexType:CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1267,9 +1267,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 remotely to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) remoteQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server remote readSerializedTrue: indexType: indexType:CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server remote readSerializedTrue: indexType: indexType:CompactRange size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1345,7 +1345,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
final int port1 = server1.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server0.getHost());
// Create client pool.
final String poolName = "testClientServerQueryPool";
@@ -1358,7 +1358,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
factory.setScope(Scope.LOCAL);
ClientServerTestCase.configureConnectionPool(factory, host0, port1,-1, true, -1, -1, null);
Region region = createRegion(regionName, rootRegionName, factory.create());
- LogWriterSupport.getLogWriter().info("Put PortfolioPdx");
+ LogWriterUtils.getLogWriter().info("Put PortfolioPdx");
for (int i=0; i<numberOfEntries; i++) {
region.put("key-"+i, new PortfolioPdx(i));
}
@@ -1403,23 +1403,23 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
for (int i=0; i < queryString.length; i++){
try {
- LogWriterSupport.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query on remote server:" + queryString[i]);
Query query = remoteQueryService.newQuery(queryString[i]);
rs[0][0] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("RR remote indexType: Range size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("RR remote indexType: Range size of resultset: "+ rs[0][0].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][0].asList(), queryString[i]);
- LogWriterSupport.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on client:" + queryString[i]);
query = localQueryService.newQuery(queryString[i]);
rs[0][1] = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " client local indexType: Range size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " client local indexType: Range size of resultset: "+ rs[0][1].size() + " for query: " + queryString[i]);;
checkForPdxString(rs[0][1].asList(), queryString[i]);
if(i < orderByQueryIndex){
// Compare local and remote query results.
if (!compareResultsOfWithAndWithoutIndex(rs)){
- LogWriterSupport.getLogWriter().info("result0="+rs[0][0].asList());
- LogWriterSupport.getLogWriter().info("result1="+rs[0][1].asList());
+ LogWriterUtils.getLogWriter().info("result0="+rs[0][0].asList());
+ LogWriterUtils.getLogWriter().info("result1="+rs[0][1].asList());
fail("Local and Remote Query Results are not matching for query :" + queryString[i]);
}
}
@@ -1441,7 +1441,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(regionName);
- LogWriterSupport.getLogWriter().info("Put Objects locally on server");
+ LogWriterUtils.getLogWriter().info("Put Objects locally on server");
for (int i=numberOfEntries; i<numberOfEntries*2; i++) {
region.put("key-"+i, new Portfolio(i));
}
@@ -1450,9 +1450,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server local indexType:Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1472,9 +1472,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 locally to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) localQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("isPR: " + isPr+ " server local readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("isPR: " + isPr+ " server local readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1494,9 +1494,9 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
// Query server1 remotely to check if PdxString is not being returned
for (int i = 0; i < queryString.length; i++) {
try {
- LogWriterSupport.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
+ LogWriterUtils.getLogWriter().info("### Executing Query locally on server:" + queryString[i]);
SelectResults rs = (SelectResults) remoteQueryService.newQuery(queryString[i]).execute();
- LogWriterSupport.getLogWriter().info("RR server remote readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
+ LogWriterUtils.getLogWriter().info("RR server remote readSerializedTrue: indexType: Range size of resultset: " + rs.size() + " for query: " + queryString[i]);
// The results should not be PdxString
checkForPdxString(rs.asList(), queryString[i]);
} catch (Exception e) {
@@ -1572,7 +1572,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
final int port1 = server1.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
final int port2 = server2.invokeInt(PdxStringQueryDUnitTest.class, "getCacheServerPort");
- final String host0 = NetworkSupport.getServerHostName(server0.getHost());
+ final String host0 = NetworkUtils.getServerHostName(server0.getHost());
// Create client pool.
final String poolName = "testClientServerQueryPool";
@@ -1586,7 +1586,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
ClientServerTestCase.configureConnectionPool(factory, host0, port1,-1, true, -1, -1, null);
Region region = createRegion(regionName, rootRegionName, factory.create());
- LogWriterSupport.getLogWriter().info("Put PortfolioPdx");
+ LogWriterUtils.getLogWriter().info("Put PortfolioPdx");
// Put some PortfolioPdx objects with null Status and secIds
for (int i=0; i<numberOfEntries*2; i++) {
PortfolioPdx portfolioPdx = new PortfolioPdx(i);
@@ -1649,7 +1649,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
try {
Query query = remoteQueryService.newQuery(qs[i]);
SelectResults res = (SelectResults)query.execute();
- LogWriterSupport.getLogWriter().info("PR NULL Pdxstring test size of resultset: "+ res.size() + " for query: " + qs[i]);;
+ LogWriterUtils.getLogWriter().info("PR NULL Pdxstring test size of resultset: "+ res.size() + " for query: " + qs[i]);;
if(i == 0){
for(Object o : res){
if(o != null){
@@ -1839,8 +1839,8 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
@Override
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm0.getHost()), port1);
- cf.addPoolServer(NetworkSupport.getServerHostName(vm1.getHost()), port2);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm0.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm1.getHost()), port2);
ClientCache cache = getClientCache(cf);
Region region = cache.createClientRegionFactory(ClientRegionShortcut.PROXY)
.create(regionName);
@@ -1935,12 +1935,12 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
SerializableRunnable closeCache =
new CacheSerializableRunnable("Close Client") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("### Close Client. ###");
+ LogWriterUtils.getLogWriter().info("### Close Client. ###");
try {
closeCache();
disconnectFromDS();
} catch (Exception ex) {
- LogWriterSupport.getLogWriter().info("### Failed to get close client. ###");
+ LogWriterUtils.getLogWriter().info("### Failed to get close client. ###");
}
}
};
@@ -1973,7 +1973,7 @@ public class PdxStringQueryDUnitTest extends CacheTestCase{
cpf.setSubscriptionEnabled(subscriptionEnabled);
cpf.setSubscriptionRedundancy(redundancy);
for (int i=0; i < servers.length; i++){
- LogWriterSupport.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
+ LogWriterUtils.getLogWriter().info("### Adding to Pool. ### Server : " + servers[i] + " Port : " + ports[i]);
cpf.addServer(servers[i], ports[i]);
}
cpf.create(poolName);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryDataInconsistencyDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryDataInconsistencyDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryDataInconsistencyDUnitTest.java
index fb7dd63..aeb4343 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryDataInconsistencyDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryDataInconsistencyDUnitTest.java
@@ -44,9 +44,9 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -204,7 +204,7 @@ public class QueryDataInconsistencyDUnitTest extends CacheTestCase {
hooked = false;//Let client put go further.
}
});
- Threads.join(putThread, 200, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(putThread, 200);
}
public void testRangeIndex() {
@@ -299,7 +299,7 @@ public class QueryDataInconsistencyDUnitTest extends CacheTestCase {
}
}
});
- Threads.join(putThread, 200, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(putThread, 200);
}
public void testRangeIndexWithIndexAndQueryFromCluaseMisMatch() {
@@ -390,7 +390,7 @@ public class QueryDataInconsistencyDUnitTest extends CacheTestCase {
}
}
});
- Threads.join(putThread, 200, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(putThread, 200);
}
public void testRangeIndexWithIndexAndQueryFromCluaseMisMatch2() {
@@ -481,7 +481,7 @@ public class QueryDataInconsistencyDUnitTest extends CacheTestCase {
}
}
});
- Threads.join(putThread, 200, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(putThread, 200);
}
public static void createProxyRegions() {
@@ -544,7 +544,7 @@ public class QueryDataInconsistencyDUnitTest extends CacheTestCase {
Region region = cache.getRegion(repRegionName);
for (int j = from; j < to; j++)
region.put(new Integer(j), portfolio[j]);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRPuts: Inserted Portfolio data on Region "
+ regionName);
@@ -558,7 +558,7 @@ public class QueryDataInconsistencyDUnitTest extends CacheTestCase {
switch (spot) {
case 9: //Before Index update and after region entry lock.
hooked = true;
- LogWriterSupport.getLogWriter().info("QueryDataInconsistency.IndexManagerTestHook is hooked in Update Index Entry.");
+ LogWriterUtils.getLogWriter().info("QueryDataInconsistency.IndexManagerTestHook is hooked in Update Index Entry.");
while(hooked) {
Wait.pause(100);
}
@@ -566,7 +566,7 @@ public class QueryDataInconsistencyDUnitTest extends CacheTestCase {
break;
case 10: //Before Region update and after Index Remove call.
hooked = true;
- LogWriterSupport.getLogWriter().info("QueryDataInconsistency.IndexManagerTestHook is hooked in Remove Index Entry.");
+ LogWriterUtils.getLogWriter().info("QueryDataInconsistency.IndexManagerTestHook is hooked in Remove Index Entry.");
while(hooked) {
Wait.pause(100);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
index 5d55dbb..5e9df71 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryIndexUsingXMLDUnitTest.java
@@ -63,12 +63,12 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -166,18 +166,18 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
AsyncInvocation asyInvk0 = vm0.invokeAsync(createIndexThrougXML("vm0testCreateIndexThroughXML", name, fileName));
AsyncInvocation asyInvk1 = vm1.invokeAsync(createIndexThrougXML("vm1testCreateIndexThroughXML", name, fileName));
- Threads.join(asyInvk1, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk1, 30 * 1000);
if (asyInvk1.exceptionOccurred()) {
Assert.fail("asyInvk1 failed", asyInvk1.getException());
}
- Threads.join(asyInvk0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 30 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
@@ -233,7 +233,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
vm0.invoke(createIndexThrougXML("vm0testCreateIndexWhileDoingGII", name, fileName));
@@ -288,7 +288,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
vm0.invoke(createIndexThrougXML("vm0testRRegionCreateIndexWhileDoingGII", repRegName, fileName));
@@ -336,7 +336,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
vm0.invoke(createIndexThrougXML("vm0testPersistentPRRegion", persistentRegName, fileName));
@@ -401,7 +401,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("### in testCreateIndexWhileDoingGIIWithEmptyPRRegion.");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("### in testCreateIndexWhileDoingGIIWithEmptyPRRegion.");
vm0.invoke(createIndexThrougXML("vm0testGIIWithEmptyPRRegion", name, fileName));
@@ -438,12 +438,12 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
AsyncInvocation asyInvk0 = vm0.invokeAsync(createIndexThrougXML("vm0testAsyncIndexWhileDoingGII", name, fileName));
- Threads.join(asyInvk0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 30 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
@@ -456,14 +456,14 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
vm0.invoke(prIndexCreationCheck(name, statusIndex, 50));
- Threads.join(asyInvk1, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk1, 30 * 1000);
if (asyInvk1.exceptionOccurred()) {
Assert.fail("asyInvk1 failed", asyInvk1.getException());
}
vm1.invoke(prIndexCreationCheck(name, statusIndex, 50));
- Threads.join(asyInvk0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 30 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
@@ -484,7 +484,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
vm0.invoke(createIndexThrougXML("vm0testIndexCompareQResults", name, fileName));
@@ -547,11 +547,11 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
AsyncInvocation asyInvk0 = vm0.invokeAsync(createIndexThrougXML("vm0testCreateAsyncIndexGIIAndQuery", name, fileName));
- Threads.join(asyInvk0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 30 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
@@ -563,11 +563,11 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
AsyncInvocation asyInvk1 = vm1.invokeAsync(createIndexThrougXML("vm1testCreateAsyncIndexGIIAndQuery", name, fileName));
- Threads.join(asyInvk1, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk1, 30 * 1000);
if (asyInvk1.exceptionOccurred()) {
Assert.fail("asyInvk1 failed", asyInvk1.getException());
}
- Threads.join(asyInvk0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 30 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
@@ -597,7 +597,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
vm0.invoke(createIndexThrougXML("vm0testAsyncIndexAndCompareQResults", name, fileName));
@@ -616,7 +616,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
vm1.invoke(setTestHook());
vm1.invoke(createIndexThrougXML("vm1testAsyncIndexAndCompareQResults", name, fileName));
- Threads.join(asyInvk0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyInvk0, 30 * 1000);
if (asyInvk0.exceptionOccurred()) {
Assert.fail("asyInvk0 failed", asyInvk0.getException());
}
@@ -640,7 +640,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
VM vm1 = host.getVM(1);
final String fileName = "IndexCreation.xml";
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Creating index using an xml file name : " + fileName);
//create index using xml
vm0.invoke(createIndexThrougXML("vm0testIndexCreationForReplicatedPersistentOverFlowRegionOnRestart", persistentOverFlowRegName, fileName));
@@ -887,7 +887,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
}
// compare.
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Execute query : \n queryStr with index: " + s[0] + " \n queryStr without index: " + s[1]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Execute query : \n queryStr with index: " + s[0] + " \n queryStr without index: " + s[1]);
ssORrs.CompareQueryResultsWithoutAndWithIndexes(sr, 1, s);
}
}
@@ -947,7 +947,7 @@ public class QueryIndexUsingXMLDUnitTest extends CacheTestCase {
new Exception("TEST DEBUG###" + diskStoreId).printStackTrace();
if (system == null || !system.isConnected()) {
// Figure out our distributed system properties
- Properties p = DistributedTestSupport.getAllDistributedSystemProperties(getDistributedSystemProperties());
+ Properties p = DistributedTestUtils.getAllDistributedSystemProperties(getDistributedSystemProperties());
system = (InternalDistributedSystem)DistributedSystem.connect(p);
}
return system;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryParamsAuthorizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryParamsAuthorizationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryParamsAuthorizationDUnitTest.java
index 5cb5bef..5984576 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryParamsAuthorizationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryParamsAuthorizationDUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.cache.server.CacheServer;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -84,7 +84,7 @@ public class QueryParamsAuthorizationDUnitTest extends CacheTestCase {
@Override
public Object call() throws Exception {
ClientCacheFactory ccf = new ClientCacheFactory()
- .addPoolServer(NetworkSupport.getServerHostName(server1.getHost()), port)
+ .addPoolServer(NetworkUtils.getServerHostName(server1.getHost()), port)
.set("security-client-auth-init",
"templates.security.UserPasswordAuthInit.create")
.set("security-username", "root").set("security-password", "root");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingFunctionContextDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingFunctionContextDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingFunctionContextDUnitTest.java
index 3f36c18..fcc96dc 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingFunctionContextDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/dunit/QueryUsingFunctionContextDUnitTest.java
@@ -59,8 +59,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -733,7 +733,7 @@ public class QueryUsingFunctionContextDUnitTest extends CacheTestCase {
//Create client cache without regions
client.invoke(QueryUsingFunctionContextDUnitTest.class, "createCacheClientWithoutRegion",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), port1, port2,
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), port1, port2,
port3 });
//Create proxy regions on client.
@@ -1024,7 +1024,7 @@ public class QueryUsingFunctionContextDUnitTest extends CacheTestCase {
Region region = cache.getRegion(regionName);
for (int j = from; j < to; j++)
region.put(new Integer(j), portfolio[j]);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRPuts: Inserted Portfolio data on Region "
+ regionName);
[10/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationDUnitTest.java
index 7590e45..526f431 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.cache.operations.OperationContext.OperationCode;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import templates.security.UserPasswordAuthInit;
@@ -113,7 +113,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
fail("executeRIOpBlock: Unknown client number " + clientNum);
break;
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"executeRIOpBlock: performing operation number ["
+ currentOp.getOpNum() + "]: " + currentOp);
if ((opFlags & OpFlags.USE_OLDCONN) == 0) {
@@ -161,7 +161,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
extraAuthzProps });
// Start the client with valid credentials but allowed or disallowed to
// perform an operation
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"executeRIOpBlock: For client" + clientNum + credentialsTypeStr
+ " credentials: " + opCredentials);
if (useThisVM) {
@@ -214,10 +214,10 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testAllowPutsGets: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testAllowPutsGets: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testAllowPutsGets: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testAllowPutsGets: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testAllowPutsGets: Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, false,
@@ -234,7 +234,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new OperationCode[] { OperationCode.PUT },
new String[] { regionName }, 1);
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllowPutsGets: For first client credentials: "
+ createCredentials);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -246,7 +246,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new OperationCode[] { OperationCode.GET },
new String[] { regionName }, 2);
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testAllowPutsGets: For second client credentials: "
+ getCredentials);
@@ -274,10 +274,10 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testDisallowPutsGets: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testDisallowPutsGets: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testDisallowPutsGets: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testDisallowPutsGets: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testDisallowPutsGets: Using accessor: " + accessor);
// Check that we indeed can obtain valid credentials not allowed to do
// gets
@@ -285,7 +285,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new OperationCode[] { OperationCode.PUT },
new String[] { regionName }, 1);
Properties createJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testDisallowPutsGets: For first client credentials: "
+ createCredentials);
Properties getCredentials = gen.getDisallowedCredentials(
@@ -293,7 +293,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new String[] { regionName }, 2);
Properties getJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testDisallowPutsGets: For second client disallowed GET credentials: "
+ getCredentials);
@@ -330,7 +330,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new OperationCode[] { OperationCode.GET },
new String[] { regionName }, 5);
getJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testDisallowPutsGets: For second client with GET credentials: "
+ getCredentials);
client2.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -356,8 +356,8 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testInvalidAccessor: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testInvalidAccessor: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testInvalidAccessor: Using authenticator: " + authenticator);
// Start server1 with invalid accessor
@@ -374,14 +374,14 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new OperationCode[] { OperationCode.PUT },
new String[] { regionName }, 3);
Properties createJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAccessor: For first client CREATE credentials: "
+ createCredentials);
Properties getCredentials = gen.getAllowedCredentials(
new OperationCode[] { OperationCode.GET },
new String[] { regionName }, 7);
Properties getJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAccessor: For second client GET credentials: "
+ getCredentials);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -398,7 +398,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new Integer(1), new Integer(SecurityTestUtil.AUTHFAIL_EXCEPTION) });
// Now start server2 that has valid accessor
- LogWriterSupport.getLogWriter().info("testInvalidAccessor: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testInvalidAccessor: Using accessor: " + accessor);
serverProps = buildProperties(authenticator, accessor, false,
extraAuthProps, extraAuthzProps);
server2.invoke(ClientAuthorizationTestBase.class, "createCacheServer",
@@ -433,11 +433,11 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsWithFailover: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsWithFailover: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsWithFailover: Using accessor: " + accessor);
// Start servers with all required properties
@@ -456,7 +456,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new OperationCode[] { OperationCode.PUT },
new String[] { regionName }, 1);
Properties createJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsWithFailover: For first client credentials: "
+ createCredentials);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -468,7 +468,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new OperationCode[] { OperationCode.GET },
new String[] { regionName }, 5);
Properties getJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsWithFailover: For second client credentials: "
+ getCredentials);
client2.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -501,7 +501,7 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
new String[] { regionName }, 9);
getJavaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testPutsGetsWithFailover: For second client disallowed GET credentials: "
+ noGetCredentials);
@@ -626,10 +626,10 @@ public class ClientAuthorizationDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testAllOpsWithFailover: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testAllOpsWithFailover: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testAllOpsWithFailover: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testAllOpsWithFailover: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testAllOpsWithFailover: Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, false,
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationTestBase.java
index d664dd5..665867f 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthorizationTestBase.java
@@ -61,7 +61,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.util.Callable;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -267,7 +267,7 @@ public class ClientAuthorizationTestBase extends DistributedTestCase {
policy = InterestResultPolicy.NONE;
}
final int numOps = indices.length;
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got doOp for op: " + op.toString() + ", numOps: " + numOps
+ ", indices: " + indicesToString(indices) + ", expect: " + expectedResult);
boolean exceptionOccured = false;
@@ -307,7 +307,7 @@ public class ClientAuthorizationTestBase extends DistributedTestCase {
// server
if ((flags & OpFlags.CHECK_NOKEY) > 0) {
AbstractRegionEntry entry = (AbstractRegionEntry)((LocalRegion)region).getRegionEntry(searchKey);
- LogWriterSupport.getLogWriter().info(""+keyNum+": key is " + searchKey + " and entry is " + entry);
+ LogWriterUtils.getLogWriter().info(""+keyNum+": key is " + searchKey + " and entry is " + entry);
assertFalse(region.containsKey(searchKey));
}
else {
@@ -648,7 +648,7 @@ public class ClientAuthorizationTestBase extends DistributedTestCase {
}
catch (RegionDestroyedException ex) {
// harmless to ignore this
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"doOp: sub-region " + region.getFullPath()
+ " already destroyed");
operationOmitted = true;
@@ -675,14 +675,14 @@ public class ClientAuthorizationTestBase extends DistributedTestCase {
|| ex instanceof QueryInvocationTargetException || ex instanceof CqException)
&& (expectedResult.intValue() == SecurityTestUtil.NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"doOp: Got expected NotAuthorizedException when doing operation ["
+ op + "] with flags " + OpFlags.description(flags)
+ ": " + ex.getCause());
continue;
}
else if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"doOp: Got expected exception when doing operation: "
+ ex.toString());
continue;
@@ -728,7 +728,7 @@ public class ClientAuthorizationTestBase extends DistributedTestCase {
fail("executeOpBlock: Unknown client number " + clientNum);
break;
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"executeOpBlock: performing operation number ["
+ currentOp.getOpNum() + "]: " + currentOp);
if ((opFlags & OpFlags.USE_OLDCONN) == 0) {
@@ -764,7 +764,7 @@ public class ClientAuthorizationTestBase extends DistributedTestCase {
extraAuthzProps });
// Start the client with valid credentials but allowed or disallowed to
// perform an operation
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"executeOpBlock: For client" + clientNum + credentialsTypeStr
+ " credentials: " + opCredentials);
boolean setupDynamicRegionFactory = (opFlags & OpFlags.ENABLE_DRF) > 0;
@@ -848,9 +848,9 @@ public class ClientAuthorizationTestBase extends DistributedTestCase {
String accessor = gen.getAuthorizationCallback();
TestAuthzCredentialGenerator tgen = new TestAuthzCredentialGenerator(gen);
- LogWriterSupport.getLogWriter().info(testName + ": Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(testName + ": Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info(testName + ": Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info(testName + ": Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(testName + ": Using authenticator: " + authenticator);
+ LogWriterUtils.getLogWriter().info(testName + ": Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, false,
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientMultiUserAuthzDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientMultiUserAuthzDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientMultiUserAuthzDUnitTest.java
index 5566a23..b2b1245 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientMultiUserAuthzDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientMultiUserAuthzDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.execute.PRClientServerTestBase;
import com.gemstone.gemfire.internal.cache.functions.TestFunction;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
@@ -76,10 +76,10 @@ public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testOps1: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testOps1: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testOps1: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testOps1: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testOps1: Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, false,
@@ -150,21 +150,21 @@ public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
gen.getDisallowedCredentials(new OperationCode[] {client1OpCodes[1]},
new String[] {regionName}, 1)};
if (client1Credentials[0] == null || client1Credentials[0].size() == 0) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps1: Unable to obtain valid credentials with "
+ client1OpCodes[0].toString()
+ " permission; skipping this combination.");
return false;
}
if (client1Credentials[1] == null || client1Credentials[1].size() == 0) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps1: Unable to obtain valid credentials with no "
+ client1OpCodes[0].toString()
+ " permission; skipping this combination.");
return false;
}
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps1: For first client credentials: " + client1Credentials[0]
+ "\n" + client1Credentials[1]);
client1.invoke(SecurityTestUtil.class, "createCacheClientForMultiUserMode",
@@ -179,21 +179,21 @@ public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
gen.getDisallowedCredentials(client2OpCodes,
new String[] {regionName}, 9)};
if (client2Credentials[0] == null || client2Credentials[0].size() == 0) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps1: Unable to obtain valid credentials with "
+ client2OpCodes[0].toString()
+ " permission; skipping this combination.");
return false;
}
if (client2Credentials[1] == null || client2Credentials[1].size() == 0) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps1: Unable to obtain valid credentials with no "
+ client2OpCodes[0].toString()
+ " permission; skipping this combination.");
return false;
}
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps1: For second client credentials: " + client2Credentials[0]
+ "\n" + client2Credentials[1]);
if (bothClientsInMultiuserMode) {
@@ -355,9 +355,9 @@ public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testOps2: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info("testOps2: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testOps2: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testOps2: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info("testOps2: Using authenticator: " + authenticator);
+ LogWriterUtils.getLogWriter().info("testOps2: Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, false,
@@ -384,7 +384,7 @@ public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
};
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps2: For first client credentials: " + client1Credentials[0]
+ "\n" + client1Credentials[1]);
client1.invoke(SecurityTestUtil.class,
@@ -401,7 +401,7 @@ public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
new String[] {regionName}, 9)};
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testOps2: For second client credentials: " + client2Credentials[0]
+ "\n" + client2Credentials[1]);
client2.invoke(SecurityTestUtil.class,
@@ -485,10 +485,10 @@ public class ClientMultiUserAuthzDUnitTest extends ClientAuthorizationTestBase {
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testOpsWithClientsInDifferentModes: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testOpsWithClientsInDifferentModes: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testOpsWithClientsInDifferentModes: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testOpsWithClientsInDifferentModes: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testOpsWithClientsInDifferentModes: Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, false,
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientAuthorizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientAuthorizationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientAuthorizationDUnitTest.java
index 661210c..f4cd68c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientAuthorizationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientAuthorizationDUnitTest.java
@@ -34,7 +34,7 @@ import com.gemstone.gemfire.cache.operations.OperationContext.OperationCode;
import com.gemstone.gemfire.internal.cache.PartitionedRegionLocalMaxMemoryDUnitTest.TestObject1;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
/**
* @since 6.1
@@ -136,10 +136,10 @@ public class DeltaClientAuthorizationDUnitTest extends
String authInit = cGen.getAuthInit();
String accessor = gen.getAuthorizationCallback();
- LogWriterSupport.getLogWriter().info("testAllowPutsGets: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info("testAllowPutsGets: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info(
"testAllowPutsGets: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testAllowPutsGets: Using accessor: " + accessor);
+ LogWriterUtils.getLogWriter().info("testAllowPutsGets: Using accessor: " + accessor);
// Start servers with all required properties
Properties serverProps = buildProperties(authenticator, accessor, false,
@@ -156,7 +156,7 @@ public class DeltaClientAuthorizationDUnitTest extends
new OperationCode[] { OperationCode.PUT },
new String[] { regionName }, 1);
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllowPutsGets: For first client credentials: "
+ createCredentials);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -168,7 +168,7 @@ public class DeltaClientAuthorizationDUnitTest extends
new OperationCode[] { OperationCode.GET },
new String[] { regionName }, 2);
javaProps = cGen.getJavaProperties();
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"testAllowPutsGets: For second client credentials: "
+ getCredentials);
@@ -198,7 +198,7 @@ public class DeltaClientAuthorizationDUnitTest extends
}
catch (Exception ex) {
if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info("Got expected exception when doing puts: " + ex);
+ LogWriterUtils.getLogWriter().info("Got expected exception when doing puts: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing puts", ex);
@@ -216,7 +216,7 @@ public class DeltaClientAuthorizationDUnitTest extends
}
catch (NoAvailableServersException ex) {
if (expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing puts: "
+ ex.getCause());
continue;
@@ -228,27 +228,27 @@ public class DeltaClientAuthorizationDUnitTest extends
catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == SecurityTestUtil.NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing puts: "
+ ex.getCause());
continue;
}
if ((expectedResult.intValue() == SecurityTestUtil.AUTHREQ_EXCEPTION)
&& (ex.getCause() instanceof AuthenticationRequiredException)) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got expected AuthenticationRequiredException when doing puts: "
+ ex.getCause());
continue;
}
if ((expectedResult.intValue() == SecurityTestUtil.AUTHFAIL_EXCEPTION)
&& (ex.getCause() instanceof AuthenticationFailedException)) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got expected AuthenticationFailedException when doing puts: "
+ ex.getCause());
continue;
}
else if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info("Got expected exception when doing puts: " + ex);
+ LogWriterUtils.getLogWriter().info("Got expected exception when doing puts: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing puts", ex);
@@ -256,7 +256,7 @@ public class DeltaClientAuthorizationDUnitTest extends
}
catch (Exception ex) {
if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info("Got expected exception when doing puts: " + ex);
+ LogWriterUtils.getLogWriter().info("Got expected exception when doing puts: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing puts", ex);
@@ -276,7 +276,7 @@ public class DeltaClientAuthorizationDUnitTest extends
}
catch (Exception ex) {
if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info("Got expected exception when doing gets: " + ex);
+ LogWriterUtils.getLogWriter().info("Got expected exception when doing gets: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing gets", ex);
@@ -297,7 +297,7 @@ public class DeltaClientAuthorizationDUnitTest extends
}
catch(NoAvailableServersException ex) {
if(expectedResult.intValue() == SecurityTestUtil.NO_AVAILABLE_SERVERS) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got expected NoAvailableServers when doing puts: "
+ ex.getCause());
continue;
@@ -309,13 +309,13 @@ public class DeltaClientAuthorizationDUnitTest extends
catch (ServerConnectivityException ex) {
if ((expectedResult.intValue() == SecurityTestUtil.NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got expected NotAuthorizedException when doing gets: "
+ ex.getCause());
continue;
}
else if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info("Got expected exception when doing gets: " + ex);
+ LogWriterUtils.getLogWriter().info("Got expected exception when doing gets: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing gets", ex);
@@ -323,7 +323,7 @@ public class DeltaClientAuthorizationDUnitTest extends
}
catch (Exception ex) {
if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info("Got expected exception when doing gets: " + ex);
+ LogWriterUtils.getLogWriter().info("Got expected exception when doing gets: " + ex);
}
else {
Assert.fail("Got unexpected exception when doing gets", ex);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientPostAuthorizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientPostAuthorizationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientPostAuthorizationDUnitTest.java
index 8d288f7..bcffeb1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientPostAuthorizationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/DeltaClientPostAuthorizationDUnitTest.java
@@ -43,7 +43,7 @@ import com.gemstone.gemfire.internal.util.Callable;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -137,11 +137,11 @@ public class DeltaClientPostAuthorizationDUnitTest extends
String accessor = gen.getAuthorizationCallback();
TestAuthzCredentialGenerator tgen = new TestAuthzCredentialGenerator(gen);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllOpsNotifications: Using authinit: " + authInit);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllOpsNotifications: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testAllOpsNotifications: Using accessor: " + accessor);
// Start servers with all required properties
@@ -220,7 +220,7 @@ public class DeltaClientPostAuthorizationDUnitTest extends
fail("executeOpBlock: Unknown client number " + clientNum);
break;
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"executeOpBlock: performing operation number ["
+ currentOp.getOpNum() + "]: " + currentOp);
if ((opFlags & OpFlags.USE_OLDCONN) == 0) {
@@ -256,7 +256,7 @@ public class DeltaClientPostAuthorizationDUnitTest extends
extraAuthzProps });
// Start the client with valid credentials but allowed or disallowed to
// perform an operation
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"executeOpBlock: For client" + clientNum + credentialsTypeStr
+ " credentials: " + opCredentials);
boolean setupDynamicRegionFactory = (opFlags & OpFlags.ENABLE_DRF) > 0;
@@ -377,7 +377,7 @@ public class DeltaClientPostAuthorizationDUnitTest extends
policy = InterestResultPolicy.NONE;
}
final int numOps = indices.length;
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Got doOp for op: " + op.toString() + ", numOps: " + numOps
+ ", indices: " + indicesToString(indices) + ", expect: " + expectedResult);
boolean exceptionOccured = false;
@@ -515,14 +515,14 @@ public class DeltaClientPostAuthorizationDUnitTest extends
|| ex instanceof QueryInvocationTargetException || ex instanceof CqException)
&& (expectedResult.intValue() == SecurityTestUtil.NOTAUTHZ_EXCEPTION)
&& (ex.getCause() instanceof NotAuthorizedException)) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"doOp: Got expected NotAuthorizedException when doing operation ["
+ op + "] with flags " + OpFlags.description(flags)
+ ": " + ex.getCause());
continue;
}
else if (expectedResult.intValue() == SecurityTestUtil.OTHER_EXCEPTION) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"doOp: Got expected exception when doing operation: "
+ ex.toString());
continue;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/P2PAuthenticationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/P2PAuthenticationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/P2PAuthenticationDUnitTest.java
index e8dea76..07bd7c7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/P2PAuthenticationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/P2PAuthenticationDUnitTest.java
@@ -41,8 +41,8 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -98,7 +98,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "26753");
props.setProperty(DistributionConfig.LOCATORS_NAME,
- NetworkSupport.getIPLiteral() + "[" + port + "]");
+ NetworkUtils.getIPLiteral() + "[" + port + "]");
props.setProperty(DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
"templates.security.UserPasswordAuthInit.create");
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
@@ -115,7 +115,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "26753");
props.setProperty(DistributionConfig.LOCATORS_NAME,
- NetworkSupport.getIPLiteral() +"[" + port + "]");
+ NetworkUtils.getIPLiteral() +"[" + port + "]");
props.setProperty(DistributionConfig.SECURITY_PEER_AUTHENTICATOR_NAME,
"templates.security.LdapUserAuthenticator.create");
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
@@ -166,7 +166,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
}
String authInit = " Incorrect_AuthInitialize";
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locators = NetworkSupport.getIPLiteral() + "[" + port + "]";
+ final String locators = NetworkUtils.getIPLiteral() + "[" + port + "]";
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, locators);
setProperty(props, DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
@@ -177,7 +177,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
getUniqueName(), new Integer(port), props, javaProps,
expectedExceptions});
- LogWriter dsLogger = LogWriterSupport.createLogWriter(props);
+ LogWriter dsLogger = LogWriterUtils.createLogWriter(props);
SecurityTestUtil.addExpectedExceptions(expectedExceptions, dsLogger);
try {
new SecurityTestUtil("tmp").createSystem(props, null);
@@ -204,7 +204,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
props = new Properties();
}
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locators = NetworkSupport.getIPLiteral() +"["+port+"]";
+ final String locators = NetworkUtils.getIPLiteral() +"["+port+"]";
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, locators);
setProperty(props, DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
@@ -215,7 +215,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
getUniqueName(), new Integer(port), props, javaProps,
expectedExceptions });
- LogWriter dsLogger = LogWriterSupport.createLogWriter(props);
+ LogWriter dsLogger = LogWriterUtils.createLogWriter(props);
SecurityTestUtil.addExpectedExceptions(expectedExceptions, dsLogger);
try {
new SecurityTestUtil("tmp").createSystem(props, javaProps);
@@ -244,7 +244,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
props = new Properties();
}
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locators = NetworkSupport.getIPLiteral() +"["+port+"]";
+ final String locators = NetworkUtils.getIPLiteral() +"["+port+"]";
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, locators);
setProperty(props, DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
@@ -255,7 +255,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
getUniqueName(), new Integer(port), props, javaProps,
expectedExceptions });
- LogWriter dsLogger = LogWriterSupport.createLogWriter(props);
+ LogWriter dsLogger = LogWriterUtils.createLogWriter(props);
SecurityTestUtil.addExpectedExceptions(expectedExceptions, dsLogger);
try {
new SecurityTestUtil("tmp").createSystem(props, null);
@@ -282,7 +282,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
props = new Properties();
}
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locators = NetworkSupport.getIPLiteral() +"["+port+"]";
+ final String locators = NetworkUtils.getIPLiteral() +"["+port+"]";
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, locators);
setProperty(props, DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
@@ -320,7 +320,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
props = new Properties();
}
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locators = NetworkSupport.getIPLiteral() +"["+port+"]";
+ final String locators = NetworkUtils.getIPLiteral() +"["+port+"]";
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
props.setProperty(DistributionConfig.LOCATORS_NAME, locators);
setProperty(props, DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
@@ -340,7 +340,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
javaProps = gen.getJavaProperties();
props.putAll(credentials);
- LogWriter dsLogger = LogWriterSupport.createLogWriter(props);
+ LogWriter dsLogger = LogWriterUtils.createLogWriter(props);
SecurityTestUtil.addExpectedExceptions(expectedExceptions, dsLogger);
try {
new SecurityTestUtil("tmp").createSystem(props, javaProps);
@@ -402,7 +402,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
// Start the locator with the LDAP authenticator
Properties props = new Properties();
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locators = NetworkSupport.getIPLiteral() +"["+port+"]";
+ final String locators = NetworkUtils.getIPLiteral() +"["+port+"]";
setProperty(props, DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
authInit);
setProperty(props, DistributionConfig.SECURITY_PEER_AUTHENTICATOR_NAME,
@@ -516,7 +516,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
// Start the locator with the Dummy authenticator
Properties props = new Properties();
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locators = NetworkSupport.getIPLiteral() +"["+port+"]";
+ final String locators = NetworkUtils.getIPLiteral() +"["+port+"]";
setProperty(props, DistributionConfig.SECURITY_PEER_AUTH_INIT_NAME,
authInit);
setProperty(props, DistributionConfig.SECURITY_PEER_AUTHENTICATOR_NAME,
@@ -552,7 +552,7 @@ public class P2PAuthenticationDUnitTest extends DistributedTestCase {
props.putAll(credentials);
props.putAll(extraProps);
- LogWriter dsLogger = LogWriterSupport.createLogWriter(props);
+ LogWriter dsLogger = LogWriterUtils.createLogWriter(props);
SecurityTestUtil.addExpectedExceptions(
new String[] { IllegalArgumentException.class.getName() }, dsLogger);
try {
[13/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/ClientHealthStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/ClientHealthStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/ClientHealthStatsDUnitTest.java
index 6533869..e501898 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/ClientHealthStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/ClientHealthStatsDUnitTest.java
@@ -49,7 +49,7 @@ import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -361,8 +361,8 @@ public class ClientHealthStatsDUnitTest extends DistributedTestCase {
String[] clientIds = bean.getClientIds();
assertTrue(clientIds.length == 2);
- LogWriterSupport.getLogWriter().info("<ExpectedString> ClientId-1 of the Server is " + clientIds[0] + "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info("<ExpectedString> ClientId-2 of the Server is " + clientIds[1] + "</ExpectedString> ");
+ LogWriterUtils.getLogWriter().info("<ExpectedString> ClientId-1 of the Server is " + clientIds[0] + "</ExpectedString> ");
+ LogWriterUtils.getLogWriter().info("<ExpectedString> ClientId-2 of the Server is " + clientIds[1] + "</ExpectedString> ");
ClientHealthStatus[] clientStatuses = bean.showAllClientStats();
@@ -372,15 +372,15 @@ public class ClientHealthStatsDUnitTest extends DistributedTestCase {
ClientHealthStatus clientStatus2 = bean.showClientStats(clientIds[1]);
assertNotNull(clientStatus1);
assertNotNull(clientStatus2);
- LogWriterSupport.getLogWriter().info("<ExpectedString> ClientStats-1 of the Server is " + clientStatus1 + "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info("<ExpectedString> ClientStats-2 of the Server is " + clientStatus2 + "</ExpectedString> ");
+ LogWriterUtils.getLogWriter().info("<ExpectedString> ClientStats-1 of the Server is " + clientStatus1 + "</ExpectedString> ");
+ LogWriterUtils.getLogWriter().info("<ExpectedString> ClientStats-2 of the Server is " + clientStatus2 + "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info("<ExpectedString> clientStatuses " + clientStatuses + "</ExpectedString> ");
+ LogWriterUtils.getLogWriter().info("<ExpectedString> clientStatuses " + clientStatuses + "</ExpectedString> ");
assertNotNull(clientStatuses);
assertTrue(clientStatuses.length == 2);
for (ClientHealthStatus status : clientStatuses) {
- LogWriterSupport.getLogWriter().info("<ExpectedString> ClientStats of the Server is " + status + "</ExpectedString> ");
+ LogWriterUtils.getLogWriter().info("<ExpectedString> ClientStats of the Server is " + status + "</ExpectedString> ");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/DLockManagementDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/DLockManagementDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/DLockManagementDUnitTest.java
index 03e32fe..827d9b1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/DLockManagementDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/DLockManagementDUnitTest.java
@@ -29,7 +29,7 @@ import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedM
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.management.internal.MBeanJMXAdapter;
import com.gemstone.gemfire.management.internal.SystemManagementService;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -202,7 +202,7 @@ public class DLockManagementDUnitTest extends ManagementTestBase {
assertNotNull(grantor);
- LogWriterSupport.getLogWriter().info("In identifyLockGrantor - grantor is " + grantor);
+ LogWriterUtils.getLogWriter().info("In identifyLockGrantor - grantor is " + grantor);
@@ -340,10 +340,10 @@ public class DLockManagementDUnitTest extends ManagementTestBase {
assertNotNull(bean);
String[] listHeldLock = bean.listHeldLocks();
assertEquals(listHeldLock.length, 1);
- LogWriterSupport.getLogWriter().info("List Of Lock Object is " + listHeldLock[0]);
+ LogWriterUtils.getLogWriter().info("List Of Lock Object is " + listHeldLock[0]);
Map<String, String> lockThreadMap = bean.listThreadsHoldingLock();
assertEquals(lockThreadMap.size(), 1);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"List Of Lock Thread is " + lockThreadMap.toString());
}
};
@@ -376,10 +376,10 @@ public class DLockManagementDUnitTest extends ManagementTestBase {
assertNotNull(bean);
String[] listHeldLock = bean.listHeldLocks();
assertEquals(listHeldLock.length, 1);
- LogWriterSupport.getLogWriter().info("List Of Lock Object is " + listHeldLock[0]);
+ LogWriterUtils.getLogWriter().info("List Of Lock Object is " + listHeldLock[0]);
Map<String, String> lockThreadMap = bean.listThreadsHoldingLock();
assertEquals(lockThreadMap.size(), 1);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"List Of Lock Thread is " + lockThreadMap.toString());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/DiskManagementDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/DiskManagementDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/DiskManagementDUnitTest.java
index b796b49..6d7b799 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/DiskManagementDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/DiskManagementDUnitTest.java
@@ -178,9 +178,9 @@ public class DiskManagementDUnitTest extends ManagementTestBase {
VM vm1 = getManagedNodeList().get(1);
VM vm2 = getManagedNodeList().get(2);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
putAnEntry(vm0);
@@ -199,12 +199,12 @@ public class DiskManagementDUnitTest extends ManagementTestBase {
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("closing region in vm0");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("closing region in vm0");
closeRegion(vm0);
updateTheEntry(vm1);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("closing region in vm1");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("closing region in vm1");
closeRegion(vm1);
AsyncInvocation future = createPersistentRegionAsync(vm0);
waitForBlockedInitialization(vm0);
@@ -219,14 +219,14 @@ public class DiskManagementDUnitTest extends ManagementTestBase {
DistributedSystemMXBean bean = service.getDistributedSystemMXBean();
PersistentMemberDetails[] missingDiskStores = bean
.listMissingDiskStores();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("waiting members=" + missingDiskStores);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("waiting members=" + missingDiskStores);
assertNotNull(missingDiskStores);
assertEquals(1, missingDiskStores.length);
for (PersistentMemberDetails id : missingDiskStores) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Missing DiskStoreID is =" + id.getDiskStoreId());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Missing Host is =" + id.getHost());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Missing Directory is =" + id.getDirectory());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Missing DiskStoreID is =" + id.getDiskStoreId());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Missing Host is =" + id.getHost());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Missing Directory is =" + id.getDirectory());
try {
bean.revokeMissingDiskStores(id.getDiskStoreId());
@@ -379,11 +379,11 @@ public class DiskManagementDUnitTest extends ManagementTestBase {
Cache cache = getCache();
Region region = cache.getRegion(REGION_NAME);
DiskRegion dr = ((LocalRegion) region).getDiskRegion();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("putting key1");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("putting key1");
region.put("key1", "value1");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("putting key2");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("putting key2");
region.put("key2", "value2");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("removing key2");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("removing key2");
region.remove("key2");
// now that it is compactable the following forceCompaction should
// go ahead and do a roll and compact it.
@@ -415,7 +415,7 @@ public class DiskManagementDUnitTest extends ManagementTestBase {
assertTrue(compactedDiskStores.length > 0);
for (int i = 0; i < compactedDiskStores.length; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"<ExpectedString> Compacted Store " + i + " "
+ compactedDiskStores[i] + "</ExpectedString> ");
}
@@ -465,13 +465,13 @@ public class DiskManagementDUnitTest extends ManagementTestBase {
String[] allDisks = bean.listDiskStores(true);
assertNotNull(allDisks);
List<String> listString = Arrays.asList(allDisks);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"<ExpectedString> Remote All Disk Stores Are "
+ listString.toString() + "</ExpectedString> ");
String[] compactedDiskStores = bean.compactAllDiskStores();
assertTrue(compactedDiskStores.length > 0);
for (int i = 0; i < compactedDiskStores.length; i++) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"<ExpectedString> Remote Compacted Store " + i + " "
+ compactedDiskStores[i] + "</ExpectedString> ");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/DistributedSystemDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/DistributedSystemDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/DistributedSystemDUnitTest.java
index bb6ae96..85a92dc 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/DistributedSystemDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/DistributedSystemDUnitTest.java
@@ -50,7 +50,7 @@ import com.gemstone.gemfire.management.internal.SystemManagementService;
import com.gemstone.gemfire.management.internal.beans.MemberMBean;
import com.gemstone.gemfire.management.internal.beans.SequenceNumber;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -308,7 +308,7 @@ public class DistributedSystemDUnitTest extends ManagementTestBase {
null);
notificationListenerMap.put(objectName, listener);
} catch (InstanceNotFoundException e) {
- LogWriterSupport.getLogWriter().error(e);
+ LogWriterUtils.getLogWriter().error(e);
}
}
}
@@ -387,9 +387,9 @@ public class DistributedSystemDUnitTest extends ManagementTestBase {
try {
mbeanServer.removeNotificationListener(objectName, listener);
} catch (ListenerNotFoundException e) {
- LogWriterSupport.getLogWriter().error(e);
+ LogWriterUtils.getLogWriter().error(e);
} catch (InstanceNotFoundException e) {
- LogWriterSupport.getLogWriter().error(e);
+ LogWriterUtils.getLogWriter().error(e);
}
}
@@ -454,7 +454,7 @@ public class DistributedSystemDUnitTest extends ManagementTestBase {
try {
mbeanServer.removeNotificationListener(objectName, listener);
} catch (InstanceNotFoundException e) {
- LogWriterSupport.getLogWriter().error(e);
+ LogWriterUtils.getLogWriter().error(e);
} catch (ListenerNotFoundException e) {
// TODO: apparently there is never a notification listener on any these mbeans at this point
// fix this test so it doesn't hit these unexpected exceptions -- getLogWriter().error(e);
@@ -484,7 +484,7 @@ public class DistributedSystemDUnitTest extends ManagementTestBase {
} catch (ListenerNotFoundException e) {
// Expected Exception Do nothing
} catch (InstanceNotFoundException e) {
- LogWriterSupport.getLogWriter().error(e);
+ LogWriterUtils.getLogWriter().error(e);
}
}
}
@@ -733,10 +733,10 @@ public class DistributedSystemDUnitTest extends ManagementTestBase {
Iterator<DistributedMember> memberIt = otherMemberSet.iterator();
while (memberIt.hasNext()) {
DistributedMember member = memberIt.next();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"JVM Metrics For Member " + member.getId() + ":"
+ bean.showJVMMetrics(member.getId()));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"OS Metrics For Member " + member.getId() + ":"
+ bean.showOSMetrics(member.getId()));
}
@@ -782,7 +782,7 @@ public class DistributedSystemDUnitTest extends ManagementTestBase {
DistributedSystemMXBean bean = service.getDistributedSystemMXBean();
assertNotNull(service.getDistributedSystemMXBean());
bean.shutDownAllMembers();
- Wait.staticPause(2000);
+ Wait.pause(2000);
assertEquals(
cache.getDistributedSystem().getAllOtherMembers().size(), 1);
return null;
@@ -809,7 +809,7 @@ public class DistributedSystemDUnitTest extends ManagementTestBase {
waitForAllMembers(4);
for(int i =0; i< bean.listMemberObjectNames().length ; i++){
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"ObjectNames Of the Mmeber" + bean.listMemberObjectNames()[i] );
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/LocatorManagementDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/LocatorManagementDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/LocatorManagementDUnitTest.java
index fdc6367..3f1c472 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/LocatorManagementDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/LocatorManagementDUnitTest.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.management.internal.ManagementConstants;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -167,7 +167,7 @@ public class LocatorManagementDUnitTest extends ManagementTestBase {
props.setProperty(DistributionConfig.MCAST_PORT_NAME,"0");
props.setProperty(DistributionConfig.LOCATORS_NAME, "");
- props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
InetAddress bindAddr = null;
try {
@@ -229,8 +229,8 @@ public class LocatorManagementDUnitTest extends ManagementTestBase {
LocatorMXBean bean = service.getLocalLocatorMXBean();
assertNotNull(bean);
assertEquals(locPort, bean.getPort());
- LogWriterSupport.getLogWriter().info("Log of Locator" + bean.viewLog());
- LogWriterSupport.getLogWriter().info("BindAddress" + bean.getBindAddress());
+ LogWriterUtils.getLogWriter().info("Log of Locator" + bean.viewLog());
+ LogWriterUtils.getLogWriter().info("BindAddress" + bean.getBindAddress());
assertEquals(isPeer, bean.isPeerLocator());
return null;
}
@@ -255,8 +255,8 @@ public class LocatorManagementDUnitTest extends ManagementTestBase {
LocatorMXBean bean = MBeanUtil.getLocatorMbeanProxy(member);
assertNotNull(bean);
- LogWriterSupport.getLogWriter().info("Log of Locator" + bean.viewLog());
- LogWriterSupport.getLogWriter().info("BindAddress" + bean.getBindAddress());
+ LogWriterUtils.getLogWriter().info("Log of Locator" + bean.viewLog());
+ LogWriterUtils.getLogWriter().info("BindAddress" + bean.getBindAddress());
return null;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/MBeanUtil.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/MBeanUtil.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/MBeanUtil.java
index af085b3..6c3c8e7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/MBeanUtil.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/MBeanUtil.java
@@ -37,7 +37,7 @@ import com.gemstone.gemfire.management.internal.MBeanJMXAdapter;
import com.gemstone.gemfire.management.internal.ManagementConstants;
import com.gemstone.gemfire.management.internal.SystemManagementService;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -538,7 +538,7 @@ public class MBeanUtil {
try {
propertyName = attributeInfo.getName();
propertyValue = mbeanServer.getAttribute(objName, propertyName);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> " + propertyName + " = " + propertyValue
+ "</ExpectedString> ");
} catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/ManagementTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/ManagementTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/ManagementTestBase.java
index 0c2d24b..d5b3178 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/ManagementTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/ManagementTestBase.java
@@ -558,7 +558,7 @@ public class ManagementTestBase extends DistributedTestCase {
RegionFactory rf = cache
.createRegionFactory(RegionShortcut.LOCAL);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Creating Local Region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Creating Local Region");
rf.create(localRegionName);
}
@@ -581,7 +581,7 @@ public class ManagementTestBase extends DistributedTestCase {
SystemManagementService service = (SystemManagementService) getManagementService();
Region region = cache.getRegion(parentRegionPath);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Creating Sub Region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Creating Sub Region");
region.createSubregion(subregionName, region.getAttributes());
}
@@ -624,7 +624,7 @@ public class ManagementTestBase extends DistributedTestCase {
SystemManagementService service = (SystemManagementService) getManagementService();
RegionFactory rf = cache.createRegionFactory(RegionShortcut.REPLICATE);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Creating Dist Region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Creating Dist Region");
rf.create(regionName);
}
@@ -646,7 +646,7 @@ public class ManagementTestBase extends DistributedTestCase {
SystemManagementService service = (SystemManagementService) getManagementService();
RegionFactory rf = cache
.createRegionFactory(RegionShortcut.PARTITION_REDUNDANT);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Creating Par Region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Creating Par Region");
rf.create(partitionRegionName);
}
@@ -665,7 +665,7 @@ public class ManagementTestBase extends DistributedTestCase {
public void run() {
GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Closing Dist Region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Closing Dist Region");
Region region = cache.getRegion(regionPath);
region.close();
@@ -689,7 +689,7 @@ public class ManagementTestBase extends DistributedTestCase {
public boolean done() {
if (bean.listMemberObjectNames() != null) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(
"Member Length " + bean.listMemberObjectNames().length);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/MemberMBeanAttributesDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/MemberMBeanAttributesDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/MemberMBeanAttributesDUnitTest.java
index 3db8048..284e7f8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/MemberMBeanAttributesDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/MemberMBeanAttributesDUnitTest.java
@@ -26,7 +26,7 @@ import com.gemstone.gemfire.internal.NanoTimer;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.process.PidUnavailableException;
import com.gemstone.gemfire.internal.process.ProcessUtils;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -113,7 +113,7 @@ public class MemberMBeanAttributesDUnitTest extends ManagementTestBase {
public void run() {
GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
RegionFactory rf = cache.createRegionFactory(RegionShortcut.REPLICATE);
- LogWriterSupport.getLogWriter().info("Creating Dist Region");
+ LogWriterUtils.getLogWriter().info("Creating Dist Region");
rf.create("testRegion1");
rf.create("testRegion2");
rf.create("testRegion3");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/QueryDataDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/QueryDataDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/QueryDataDUnitTest.java
index 84a270f..f98af24 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/QueryDataDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/QueryDataDUnitTest.java
@@ -53,7 +53,7 @@ import com.gemstone.gemfire.management.internal.cli.json.TypedJson;
import com.gemstone.gemfire.pdx.PdxInstance;
import com.gemstone.gemfire.pdx.PdxInstanceFactory;
import com.gemstone.gemfire.pdx.internal.PdxInstanceFactoryImpl;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -172,7 +172,7 @@ public class QueryDataDUnitTest extends ManagementTestBase {
Region region = cache.getRegion(regionName);
for (int j = from; j < to; j++)
region.put(new Integer(j), portfolio[j]);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"PRQueryDUnitHelper#getCacheSerializableRunnableForPRPuts: Inserted Portfolio data on Region "
+ regionName);
@@ -407,7 +407,7 @@ public class QueryDataDUnitTest extends ManagementTestBase {
if (jsonString1.contains("result")) {
JSONObject jsonObj = new JSONObject(jsonString1);
} else {
- LogWriterSupport.getLogWriter().info("Failed Test String" + queriesForRR[i] + " is = " + jsonString1);
+ LogWriterUtils.getLogWriter().info("Failed Test String" + queriesForRR[i] + " is = " + jsonString1);
fail("Join on Replicated did not work.");
}
}
@@ -800,19 +800,19 @@ public class QueryDataDUnitTest extends ManagementTestBase {
}, MAX_WAIT, 1000, true);
- LogWriterSupport.getLogWriter().info("member1RealData is = " + member1RealData);
- LogWriterSupport.getLogWriter().info("member2RealData is = " + member2RealData);
- LogWriterSupport.getLogWriter().info("member3RealData is = " + member3RealData);
+ LogWriterUtils.getLogWriter().info("member1RealData is = " + member1RealData);
+ LogWriterUtils.getLogWriter().info("member2RealData is = " + member2RealData);
+ LogWriterUtils.getLogWriter().info("member3RealData is = " + member3RealData);
String member1Result = bean.queryData(query, member1.getId(), 0);
- LogWriterSupport.getLogWriter().info("member1Result " + query + " is = " + member1Result);
+ LogWriterUtils.getLogWriter().info("member1Result " + query + " is = " + member1Result);
String member2Result = bean.queryData(query, member2.getId(), 0);
- LogWriterSupport.getLogWriter().info("member2Result " + query + " is = " + member2Result);
+ LogWriterUtils.getLogWriter().info("member2Result " + query + " is = " + member2Result);
String member3Result = bean.queryData(query, member3.getId(), 0);
- LogWriterSupport.getLogWriter().info("member3Result " + query + " is = " + member3Result);
+ LogWriterUtils.getLogWriter().info("member3Result " + query + " is = " + member3Result);
for (String val : member1RealData) {
assertTrue(member1Result.contains(val));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/RegionManagementDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/RegionManagementDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/RegionManagementDUnitTest.java
index fde678a..4166273 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/RegionManagementDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/RegionManagementDUnitTest.java
@@ -50,7 +50,7 @@ import com.gemstone.gemfire.internal.cache.partitioned.fixed.SingleHopQuarterPar
import com.gemstone.gemfire.management.internal.MBeanJMXAdapter;
import com.gemstone.gemfire.management.internal.SystemManagementService;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -443,7 +443,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
region.put(new Integer(total), array);
}
assertTrue(bean.getEntrySize() > 0);
- LogWriterSupport.getLogWriter().info("DEBUG: EntrySize =" + bean.getEntrySize());
+ LogWriterUtils.getLogWriter().info("DEBUG: EntrySize =" + bean.getEntrySize());
@@ -474,7 +474,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
assertNotNull(bean);
assertTrue(bean.getEntrySize() > 0);
- LogWriterSupport.getLogWriter().info("DEBUG: EntrySize =" + bean.getEntrySize());
+ LogWriterUtils.getLogWriter().info("DEBUG: EntrySize =" + bean.getEntrySize());
}
});
@@ -677,14 +677,14 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
attr.setPartitionAttributes(paf.create());
fixedPrRegion = cache.createRegion(FIXED_PR_NAME, attr.create());
assertNotNull(fixedPrRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region " + FIXED_PR_NAME + " created Successfully :"
+ fixedPrRegion.toString());
RegionMXBean bean = service.getLocalRegionMBean(FIXED_PR_PATH);
RegionAttributes regAttrs = fixedPrRegion.getAttributes();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"FixedPartitionAttribute From GemFire :"
+ regAttrs.getPartitionAttributes().getFixedPartitionAttributes());
@@ -701,7 +701,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
assertEquals(3, fixedPrData.length);
for (int i = 0; i < fixedPrData.length; i++) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Fixed PR Data is " + fixedPrData[i]
+ "</ExpectedString> ");
}
@@ -735,7 +735,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
assertNotNull(fixedPrData);
assertEquals(3, fixedPrData.length);
for (int i = 0; i < fixedPrData.length; i++) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Remote PR Data is " + fixedPrData[i]
+ "</ExpectedString> ");
}
@@ -774,7 +774,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
Set<ObjectName> names = service.queryMBeanNames(member);
if(names != null){
for(ObjectName name : names){
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> ObjectNames arr" + name
+ "</ExpectedString> ");
}
@@ -909,10 +909,10 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
EvictionAttributesData evictionData = bean.listEvictionAttributes();
assertNotNull(membershipData);
assertNotNull(evictionData);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Membership Data is "
+ membershipData.toString() + "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Eviction Data is " + membershipData.toString()
+ "</ExpectedString> ");
@@ -987,15 +987,15 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
// Check Stats related Data
// Add Mock testing
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info(
"<ExpectedString> CacheListenerCallsAvgLatency is "
+ bean.getCacheListenerCallsAvgLatency()
+ "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> CacheWriterCallsAvgLatency is "
+ bean.getCacheWriterCallsAvgLatency() + "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> CreatesRate is " + bean.getCreatesRate()
+ "</ExpectedString> ");
@@ -1096,10 +1096,10 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
EvictionAttributesData evictionData = bean.listEvictionAttributes();
assertNotNull(membershipData);
assertNotNull(evictionData);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Membership Data is " + membershipData.toString()
+ "</ExpectedString> ");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Eviction Data is " + membershipData.toString()
+ "</ExpectedString> ");
}
@@ -1147,7 +1147,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
try {
bean = service.getLocalRegionMBean(REGION_PATH);
} catch (ManagementException mgtEx) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Expected Exception "
+ mgtEx.getLocalizedMessage() + "</ExpectedString> ");
}
@@ -1172,12 +1172,12 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
public void run() {
GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
ManagementService service = getManagementService();
- LogWriterSupport.getLogWriter().info("Closing Par Region");
+ LogWriterUtils.getLogWriter().info("Closing Par Region");
RegionMXBean bean = null;
try {
bean = service.getLocalRegionMBean(PARTITIONED_REGION_PATH);
} catch (ManagementException mgtEx) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Expected Exception "
+ mgtEx.getLocalizedMessage() + "</ExpectedString> ");
}
@@ -1198,14 +1198,14 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
public void run() {
GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
ManagementService service = getManagementService();
- LogWriterSupport.getLogWriter().info("Closing Fixed Par Region");
+ LogWriterUtils.getLogWriter().info("Closing Fixed Par Region");
Region region = cache.getRegion(FIXED_PR_PATH);
region.close();
RegionMXBean bean = null;
try {
bean = service.getLocalRegionMBean(FIXED_PR_PATH);
} catch (ManagementException mgtEx) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Expected Exception "
+ mgtEx.getLocalizedMessage() + "</ExpectedString> ");
}
@@ -1426,7 +1426,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
Notification rn = notification;
assertTrue(rn.getType().equals(JMXNotificationType.REGION_CREATED)
|| rn.getType().equals(JMXNotificationType.REGION_CLOSED));
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Member Level Notifications" + rn.toString()
+ "</ExpectedString> ");
}
@@ -1445,7 +1445,7 @@ public class RegionManagementDUnitTest extends ManagementTestBase {
public void handleNotification(Notification notification, Object handback) {
assertNotNull(notification);
Notification rn = notification;
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"<ExpectedString> Distributed System Notifications" + rn.toString()
+ "</ExpectedString> ");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/UniversalMembershipListenerAdapterDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/UniversalMembershipListenerAdapterDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/UniversalMembershipListenerAdapterDUnitTest.java
index 17a9bd0..c25a1db 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/UniversalMembershipListenerAdapterDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/UniversalMembershipListenerAdapterDUnitTest.java
@@ -50,7 +50,7 @@ import com.gemstone.gemfire.management.membership.MembershipEvent;
import com.gemstone.gemfire.management.membership.MembershipListener;
import com.gemstone.gemfire.management.membership.UniversalMembershipListenerAdapter;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -378,7 +378,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
final int[] ports = new int[1];
// create BridgeServer in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -392,9 +392,9 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
final DistributedMember serverMember = getDistributedMember();
final Properties serverProperties = getSystem().getProperties();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] serverMemberId=" + serverMemberId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] serverMember=" + serverMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] serverMember=" + serverMember);
// register the bridge listener
ClientMembership.registerClientMembershipListener(bridgeListener);
@@ -413,14 +413,14 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
new CacheSerializableRunnable("Create bridge client") {
@Override
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] create bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
getSystem(config);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, false, -1, -1, null);
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, false, -1, -1, null);
createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
}
@@ -446,7 +446,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client join");
assertTrue(firedBridge[JOINED]);
assertEquals(clientMember, memberBridge[JOINED]);
//as of 6.1 the id can change when a bridge is created or a connection pool is created
@@ -492,7 +492,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
vm0.invoke(new SerializableRunnable("Wait for client to fully connect") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] wait for client to fully connect");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] wait for client to fully connect");
final String pl =
getRootRegion().getSubregion(name).getAttributes().getPoolName();
PoolImpl pi = (PoolImpl)PoolManager.find(pl);
@@ -502,7 +502,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
vm0.invoke(new SerializableRunnable("Close bridge client region") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] close bridge client region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] close bridge client region");
getRootRegion().getSubregion(name).close();
PoolManager.close();
}
@@ -519,7 +519,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client left");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client left");
assertFalse(firedBridge[JOINED]);
assertNull(memberIdBridge[JOINED]);
@@ -581,7 +581,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client re-join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client re-join");
assertTrue(firedBridge[JOINED]);
assertEquals(clientMember, memberBridge[JOINED]);
assertEquals(clientMemberId, memberIdBridge[JOINED]);
@@ -626,7 +626,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
vm0.invoke(new SerializableRunnable("Wait for client to fully connect") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] wait for client to fully connect");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] wait for client to fully connect");
final String pl =
getRootRegion().getSubregion(name).getAttributes().getPoolName();
PoolImpl pi = (PoolImpl)PoolManager.find(pl);
@@ -638,7 +638,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
try {
vm0.invoke(new SerializableRunnable("Stop bridge client") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] Stop bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] Stop bridge client");
getRootRegion().getSubregion(name).close();
PoolManager.close();
}
@@ -655,7 +655,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client crashed");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testLonerClientEventsInServer] assert server detected client crashed");
assertFalse(firedBridge[JOINED]);
assertNull(memberIdBridge[JOINED]);
assertNull(memberBridge[JOINED]);
@@ -762,7 +762,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
new UniversalMembershipListenerAdapter() {
@Override
public synchronized void memberJoined(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] memberJoined >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] memberJoined >" + event.getMemberId() + "<");
firedAdapterDuplicate[JOINED] = firedAdapter[JOINED];
firedAdapter[JOINED] = true;
memberAdapter[JOINED] = event.getDistributedMember();
@@ -775,7 +775,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
@Override
public synchronized void memberLeft(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] memberLeft >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] memberLeft >" + event.getMemberId() + "<");
firedAdapterDuplicate[LEFT] = firedAdapter[LEFT];
firedAdapter[LEFT] = true;
memberAdapter[LEFT] = event.getDistributedMember();
@@ -788,7 +788,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
@Override
public synchronized void memberCrashed(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] memberCrashed >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] memberCrashed >" + event.getMemberId() + "<");
firedAdapterDuplicate[CRASHED] = firedAdapter[CRASHED];
firedAdapter[CRASHED] = true;
memberAdapter[CRASHED] = event.getDistributedMember();
@@ -834,7 +834,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
final int[] ports = new int[1];
// create BridgeServer in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -854,9 +854,9 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
serverProperties.remove(DistributionConfig.SSL_PROTOCOLS_NAME);
serverProperties.remove(DistributionConfig.SSL_REQUIRE_AUTHENTICATION_NAME);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] serverMemberId=" + serverMemberId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] serverMember=" + serverMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] serverMember=" + serverMember);
// register the bridge listener
ClientMembership.registerClientMembershipListener(bridgeListener);
@@ -875,12 +875,12 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
new CacheSerializableRunnable("Create bridge client") {
@Override
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] create system bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] create system bridge client");
assertTrue(getSystem(serverProperties).isConnected());
assertFalse(getCache().isClosed());
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, false, -1, -1, null);
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, false, -1, -1, null);
createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
}
@@ -910,7 +910,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client join");
assertFalse(firedSystemDuplicate);
assertFalse(firedAdapterDuplicate);
assertFalse(firedBridgeDuplicate);
@@ -959,7 +959,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
vm0.invoke(new SerializableRunnable("Wait for client to fully connect") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] wait for client to fully connect");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] wait for client to fully connect");
final String pl =
getRootRegion().getSubregion(name).getAttributes().getPoolName();
PoolImpl pi = (PoolImpl)PoolManager.find(pl);
@@ -970,7 +970,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
// close bridge client region
vm0.invoke(new SerializableRunnable("Close bridge client region") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] close bridge client region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] close bridge client region");
getRootRegion().getSubregion(name).close();
PoolManager.close();
}
@@ -987,7 +987,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client left");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client left");
assertFalse(firedSystemDuplicate);
assertFalse(firedAdapterDuplicate);
assertFalse(firedBridgeDuplicate);
@@ -1052,7 +1052,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client re-join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client re-join");
assertFalse(firedSystemDuplicate);
assertFalse(firedAdapterDuplicate);
assertFalse(firedBridgeDuplicate);
@@ -1101,7 +1101,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
vm0.invoke(new SerializableRunnable("Wait for client to fully connect") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] wait for client to fully connect");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] wait for client to fully connect");
final String pl =
getRootRegion().getSubregion(name).getAttributes().getPoolName();
PoolImpl pi = (PoolImpl)PoolManager.find(pl);
@@ -1112,7 +1112,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
// have bridge client disconnect from system
vm0.invoke(new SerializableRunnable("Disconnect bridge client") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] disconnect bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] disconnect bridge client");
closeCache();
disconnectFromDS();
}
@@ -1134,7 +1134,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client left");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client left");
assertFalse(firedSystemDuplicate);
assertFalse(firedAdapterDuplicate);
assertFalse(firedBridgeDuplicate);
@@ -1204,7 +1204,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client re-join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client re-join");
assertFalse(firedSystemDuplicate);
assertFalse(firedAdapterDuplicate);
assertFalse(firedBridgeDuplicate);
@@ -1253,7 +1253,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
vm0.invoke(new SerializableRunnable("Wait for client to fully connect") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] wait for client to fully connect");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] wait for client to fully connect");
final String pl =
getRootRegion().getSubregion(name).getAttributes().getPoolName();
PoolImpl pi = (PoolImpl)PoolManager.find(pl);
@@ -1266,7 +1266,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
try {
vm0.invoke(new SerializableRunnable("Close bridge client region") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] close bridge client region");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] close bridge client region");
getRootRegion().getSubregion(name).close();
PoolManager.close();
}
@@ -1283,7 +1283,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client crashed");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] assert server detected client crashed");
assertFalse(firedSystemDuplicate);
assertFalse(firedAdapterDuplicate);
assertFalse(firedBridgeDuplicate);
@@ -1337,7 +1337,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
* Note: This probably won't work if the pool has more than one Endpoint.
*/
protected void waitForClientToFullyConnect(final PoolImpl pool) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[waitForClientToFullyConnect]");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[waitForClientToFullyConnect]");
final long failMillis = System.currentTimeMillis() + JOIN_FAIL_MILLIS;
boolean fullyConnected = false;
while (!fullyConnected) {
@@ -1350,7 +1350,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
+ " connections were created.",
System.currentTimeMillis() < failMillis);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[waitForClientToFullyConnect] fullyConnected=" + fullyConnected);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[waitForClientToFullyConnect] fullyConnected=" + fullyConnected);
}
/**
@@ -1461,7 +1461,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
new UniversalMembershipListenerAdapter() {
@Override
public synchronized void memberJoined(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInSystemClient] memberJoined >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInSystemClient] memberJoined >" + event.getMemberId() + "<");
firedAdapterDuplicate[JOINED] = firedAdapter[JOINED];
firedAdapter[JOINED] = true;
memberAdapter[JOINED] = event.getDistributedMember();
@@ -1474,7 +1474,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
@Override
public synchronized void memberLeft(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInSystemClient] memberLeft >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInSystemClient] memberLeft >" + event.getMemberId() + "<");
firedAdapterDuplicate[LEFT] = firedAdapter[LEFT];
firedAdapter[LEFT] = true;
memberAdapter[LEFT] = event.getDistributedMember();
@@ -1487,7 +1487,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
@Override
public synchronized void memberCrashed(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInSystemClient] memberCrashed >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInSystemClient] memberCrashed >" + event.getMemberId() + "<");
firedAdapterDuplicate[CRASHED] = firedAdapter[CRASHED];
firedAdapter[CRASHED] = true;
memberAdapter[CRASHED] = event.getDistributedMember();
@@ -1535,7 +1535,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
assertTrue(ports[0] != 0);
// create BridgeServer in controller vm...
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[doTestSystemClientEventsInServer] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[doTestSystemClientEventsInServer] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1554,9 +1554,9 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
serverProperties.remove(DistributionConfig.SSL_PROTOCOLS_NAME);
serverProperties.remove(DistributionConfig.SSL_REQUIRE_AUTHENTICATION_NAME);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] serverMemberId=" + serverMemberId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] serverMember=" + serverMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] serverMember=" + serverMember);
GemFireCacheImpl cache = GemFireCacheImpl.getExisting();
assertNotNull(cache);
@@ -1572,7 +1572,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
new CacheSerializableRunnable("Create Peer Cache") {
@Override
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] Create Peer cache");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] Create Peer cache");
getSystem(serverProperties);
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1589,8 +1589,8 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
DistributedMember peerMember = (DistributedMember) vm0.invoke(
UniversalMembershipListenerAdapterDUnitTest.class, "getDistributedMember");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] peerMemberId=" + peerMemberId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] peerMember=" + peerMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] peerMemberId=" + peerMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] peerMember=" + peerMember);
@@ -1605,7 +1605,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] assert server detected peer join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] assert server detected peer join");
assertFalse(firedSystemDuplicate);
// TODO: sometimes get adapter duplicate since memberId isn't endpoint
// initial impl uses Endpoint.toString() for memberId of server; final
@@ -1653,7 +1653,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
try {
vm0.invoke(new SerializableRunnable("Disconnect Peer server") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] disconnect peer server");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] disconnect peer server");
closeCache();
disconnectFromDS();
}
@@ -1676,7 +1676,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
ex.remove();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInPeerSystem] assert server detected peer crashed");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInPeerSystem] assert server detected peer crashed");
assertFalse(firedSystemDuplicate);
// TODO: sometimes get adapter duplicate since memberId isn't endpoint
// initial impl uses Endpoint.toString() for memberId of server; final
@@ -1757,7 +1757,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
new UniversalMembershipListenerAdapter() {
@Override
public synchronized void memberJoined(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] memberJoined >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] memberJoined >" + event.getMemberId() + "<");
firedAdapterDuplicate[JOINED] = firedAdapter[JOINED];
firedAdapter[JOINED] = true;
memberAdapter[JOINED] = event.getDistributedMember();
@@ -1770,7 +1770,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
@Override
public synchronized void memberLeft(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] memberLeft >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] memberLeft >" + event.getMemberId() + "<");
firedAdapterDuplicate[LEFT] = firedAdapter[LEFT];
firedAdapter[LEFT] = true;
memberAdapter[LEFT] = event.getDistributedMember();
@@ -1783,7 +1783,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
@Override
public synchronized void memberCrashed(MembershipEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] memberCrashed >" + event.getMemberId() + "<");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] memberCrashed >" + event.getMemberId() + "<");
firedAdapterDuplicate[CRASHED] = firedAdapter[CRASHED];
firedAdapter[CRASHED] = true;
memberAdapter[CRASHED] = event.getDistributedMember();
@@ -1830,13 +1830,13 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
{ AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET) };
assertTrue(ports[0] != 0);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] create loner bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] create loner bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
getSystem(config);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] create system bridge client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] create system bridge client");
getSystem();
// register the bridge listener
@@ -1852,7 +1852,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
new CacheSerializableRunnable("Create BridgeServer") {
@Override
public void run2() throws CacheException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] Create BridgeServer");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -1863,7 +1863,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
testServerEventsInLonerClient_port = startBridgeServer(ports[0]);
}
catch (IOException e) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().error(e);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().error(e);
fail(e.getMessage());
}
}
@@ -1880,14 +1880,14 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
DistributedMember serverMember = (DistributedMember) vm0.invoke(
UniversalMembershipListenerAdapterDUnitTest.class, "getDistributedMember");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] serverMemberId=" + serverMemberId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] serverMember=" + serverMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] serverMember=" + serverMember);
// create region which connects to bridge server
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, false, -1, -1, null);
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, false, -1, -1, null);
createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
@@ -1902,7 +1902,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] assert client detected server join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] assert client detected server join");
// TODO: sometimes get adapter duplicate since memberId isn't endpoint KIRK
// initial impl uses Endpoint.toString() for memberId of server; final
// impl should have server send its real memberId to client via HandShake
@@ -1942,7 +1942,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
assertFalse(isClientAdapter[CRASHED]);
resetArraysForTesting(firedAdapter, memberAdapter, memberIdAdapter, isClientAdapter);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] wait for client to fully connect");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] wait for client to fully connect");
final String pl =
getRootRegion().getSubregion(name).getAttributes().getPoolName();
PoolImpl pi = (PoolImpl)PoolManager.find(pl);
@@ -1971,7 +1971,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
try {
vm0.invoke(new SerializableRunnable("Disconnect bridge server") {
public void run() {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] disconnect bridge server");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] disconnect bridge server");
closeCache();
}
});
@@ -1995,7 +1995,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
lw.info(removeExpected2);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] assert client detected server crashed");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] assert client detected server crashed");
// TODO: sometimes get adapter duplicate since memberId isn't endpoint KIRK
// initial impl uses Endpoint.toString() for memberId of server; final
// impl should have server send its real memberId to client via HandShake
@@ -2051,9 +2051,9 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
serverMember = (DistributedMember) vm0.invoke(
UniversalMembershipListenerAdapterDUnitTest.class, "getDistributedMember");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] ports[0]=" + ports[0]);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] serverMemberId=" + serverMemberId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] serverMember=" + serverMember);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] ports[0]=" + ports[0]);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] serverMemberId=" + serverMemberId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] serverMember=" + serverMember);
synchronized(adapter) {
if (!firedAdapter[JOINED]) {
@@ -2066,7 +2066,7 @@ public class UniversalMembershipListenerAdapterDUnitTest extends ClientServerTes
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("[testServerEventsInLonerClient] assert client detected server re-join");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("[testServerEventsInLonerClient] assert client detected server re-join");
// TODO: sometimes get adapter duplicate since memberId isn't endpoint KIRK
// initial impl uses Endpoint.toString() for memberId of server; final
// impl should have server send its real memberId to client via HandShake
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/CliUtilDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/CliUtilDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/CliUtilDUnitTest.java
index 0511622..192b458 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/CliUtilDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/CliUtilDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.management.RegionMXBean;
import com.gemstone.gemfire.management.internal.cli.result.CommandResultException;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -165,22 +165,22 @@ public class CliUtilDUnitTest extends CacheTestCase {
checkBean(REGION_MEMBER1_GROUP2,1) &&
checkBean(REGION_MEMBER2_GROUP2,1) ;
if(!flag){
- LogWriterSupport.getLogWriter().info("Still probing for mbeans");
+ LogWriterUtils.getLogWriter().info("Still probing for mbeans");
return false;
}
else{
- LogWriterSupport.getLogWriter().info("All distributed region mbeans are federated to manager.");
+ LogWriterUtils.getLogWriter().info("All distributed region mbeans are federated to manager.");
return true;
}
}
private boolean checkBean(String string, int memberCount) {
DistributedRegionMXBean bean2 = service.getDistributedRegionMXBean(Region.SEPARATOR+string);
- LogWriterSupport.getLogWriter().info("DistributedRegionMXBean for region=" + string + " is " + bean2);
+ LogWriterUtils.getLogWriter().info("DistributedRegionMXBean for region=" + string + " is " + bean2);
if(bean2==null)
return false;
else{
int members = bean2.getMemberCount();
- LogWriterSupport.getLogWriter().info("DistributedRegionMXBean for region=" + string + " is aggregated for " + memberCount + " expected count=" + memberCount);
+ LogWriterUtils.getLogWriter().info("DistributedRegionMXBean for region=" + string + " is aggregated for " + memberCount + " expected count=" + memberCount);
if(members<memberCount){
return false;
}
@@ -196,7 +196,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
};
Wait.waitForCriterion(waitForMaangerMBean, 120000, 2000, true);
- LogWriterSupport.getLogWriter().info("Manager federation is complete");
+ LogWriterUtils.getLogWriter().info("Manager federation is complete");
}
private void registerFunction() {
@@ -212,7 +212,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
assertNotNull(service.getMemberMXBean());
RegionMXBean bean = service.getLocalRegionMBean(Region.SEPARATOR+regionName);
assertNotNull(bean);
- LogWriterSupport.getLogWriter().info("Created region=" + regionName + " Bean=" + bean);
+ LogWriterUtils.getLogWriter().info("Created region=" + regionName + " Bean=" + bean);
return region;
}
@@ -224,7 +224,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
localProps.setProperty(DistributionConfig.JMX_MANAGER_START_NAME, "false");
int jmxPort = AvailablePortHelper.getRandomAvailableTCPPort();
localProps.setProperty(DistributionConfig.JMX_MANAGER_PORT_NAME, ""+jmxPort);
- LogWriterSupport.getLogWriter().info("Set jmx-port="+ jmxPort);
+ LogWriterUtils.getLogWriter().info("Set jmx-port="+ jmxPort);
getSystem(localProps);
getCache();
final ManagementService service = ManagementService.getManagementService(getCache());
@@ -243,7 +243,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
final VM vm1 = Host.getHost(0).getVM(0);
- LogWriterSupport.getLogWriter().info("testFor - findAllMatchingMembers");
+ LogWriterUtils.getLogWriter().info("testFor - findAllMatchingMembers");
vm1.invoke(new SerializableRunnable() {
@Override
public void run() {
@@ -259,7 +259,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
}
});
- LogWriterSupport.getLogWriter().info("testFor - getDistributedMemberByNameOrId");
+ LogWriterUtils.getLogWriter().info("testFor - getDistributedMemberByNameOrId");
vm1.invoke(new SerializableRunnable() {
@Override
public void run() {
@@ -267,7 +267,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
}
});
- LogWriterSupport.getLogWriter().info("testFor - executeFunction");
+ LogWriterUtils.getLogWriter().info("testFor - executeFunction");
vm1.invoke(new SerializableRunnable() {
@Override
public void run() {
@@ -275,7 +275,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
}
});
- LogWriterSupport.getLogWriter().info("testFor - getRegionAssociatedMembers");
+ LogWriterUtils.getLogWriter().info("testFor - getRegionAssociatedMembers");
vm1.invoke(new SerializableRunnable() {
@Override
public void run() {
@@ -421,7 +421,7 @@ public class CliUtilDUnitTest extends CacheTestCase {
Region region = cache.getRegion(COMMON_REGION);
String id = cache.getDistributedSystem().getDistributedMember().getName();
region.put(id, object);
- LogWriterSupport.getLogWriter().info("Completed executeFunction on member : " + id);
+ LogWriterUtils.getLogWriter().info("Completed executeFunction on member : " + id);
context.getResultSender().lastResult(true);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CliCommandTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CliCommandTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CliCommandTestBase.java
index ee47beb..664e7a6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CliCommandTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/CliCommandTestBase.java
@@ -29,7 +29,7 @@ import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.shell.Gfsh;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -548,18 +548,18 @@ public class CliCommandTestBase extends CacheTestCase {
}
protected void info(String string) {
- LogWriterSupport.getLogWriter().info(string);
+ LogWriterUtils.getLogWriter().info(string);
}
protected void debug(String string) {
- LogWriterSupport.getLogWriter().fine(string);
+ LogWriterUtils.getLogWriter().fine(string);
}
protected void error(String string) {
- LogWriterSupport.getLogWriter().error(string);
+ LogWriterUtils.getLogWriter().error(string);
}
protected void error(String string, Throwable e) {
- LogWriterSupport.getLogWriter().error(string, e);
+ LogWriterUtils.getLogWriter().error(string, e);
}
}
[02/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/WANTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/WANTestBase.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/WANTestBase.java
index d2796c8..28f370f 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/WANTestBase.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/WANTestBase.java
@@ -117,7 +117,7 @@ import com.gemstone.gemfire.pdx.SimpleClass1;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -735,7 +735,7 @@ public class WANTestBase extends DistributedTestCase{
} else {
persistentDirectory = new File(diskStoreName);
}
- LogWriterSupport.getLogWriter().info("The ds is : " + persistentDirectory.getName());
+ LogWriterUtils.getLogWriter().info("The ds is : " + persistentDirectory.getName());
persistentDirectory.mkdir();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
File [] dirs1 = new File[] {persistentDirectory};
@@ -1206,7 +1206,7 @@ public class WANTestBase extends DistributedTestCase{
customerRegion = (PartitionedRegion)cache.createRegionFactory(
fact.create()).create(customerRegionName);
assertNotNull(customerRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region CUSTOMER created Successfully :"
+ customerRegion.toString());
@@ -1231,7 +1231,7 @@ public class WANTestBase extends DistributedTestCase{
orderRegion = (PartitionedRegion)cache.createRegionFactory(fact.create())
.create(orderRegionName);
assertNotNull(orderRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region ORDER created Successfully :"
+ orderRegion.toString());
@@ -1256,7 +1256,7 @@ public class WANTestBase extends DistributedTestCase{
shipmentRegion = (PartitionedRegion)cache.createRegionFactory(
fact.create()).create(shipmentRegionName);
assertNotNull(shipmentRegion);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Partitioned Region SHIPMENT created Successfully :"
+ shipmentRegion.toString());
}
@@ -1366,7 +1366,7 @@ public class WANTestBase extends DistributedTestCase{
boolean gatewaySslRequireAuth = true;
Properties gemFireProps = new Properties();
- gemFireProps.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ gemFireProps.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
gemFireProps.put(DistributionConfig.GATEWAY_SSL_ENABLED_NAME, String.valueOf(gatewaySslenabled));
gemFireProps.put(DistributionConfig.GATEWAY_SSL_PROTOCOLS_NAME, gatewaySslprotocols);
gemFireProps.put(DistributionConfig.GATEWAY_SSL_CIPHERS_NAME, gatewaySslciphers);
@@ -1383,7 +1383,7 @@ public class WANTestBase extends DistributedTestCase{
gemFireProps.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
gemFireProps.setProperty(DistributionConfig.LOCATORS_NAME, "localhost[" + locPort + "]");
- LogWriterSupport.getLogWriter().info("Starting cache ds with following properties \n" + gemFireProps);
+ LogWriterUtils.getLogWriter().info("Starting cache ds with following properties \n" + gemFireProps);
InternalDistributedSystem ds = test.getSystem(gemFireProps);
cache = CacheFactory.create(ds);
@@ -2601,7 +2601,7 @@ public class WANTestBase extends DistributedTestCase{
else {
persistentDirectory = new File(dsStore);
}
- LogWriterSupport.getLogWriter().info("The ds is : " + persistentDirectory.getName());
+ LogWriterUtils.getLogWriter().info("The ds is : " + persistentDirectory.getName());
persistentDirectory.mkdir();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
@@ -2623,12 +2623,12 @@ public class WANTestBase extends DistributedTestCase{
gateway.setPersistenceEnabled(true);
String dsname = dsf.setDiskDirs(dirs1).create(dsName).getName();
gateway.setDiskStoreName(dsname);
- LogWriterSupport.getLogWriter().info("The DiskStoreName is : " + dsname);
+ LogWriterUtils.getLogWriter().info("The DiskStoreName is : " + dsname);
}
else {
DiskStore store = dsf.setDiskDirs(dirs1).create(dsName);
gateway.setDiskStoreName(store.getName());
- LogWriterSupport.getLogWriter().info("The ds is : " + store.getName());
+ LogWriterUtils.getLogWriter().info("The ds is : " + store.getName());
}
gateway.setBatchConflationEnabled(isConflation);
gateway.create(dsName, remoteDsId);
@@ -2771,7 +2771,7 @@ public class WANTestBase extends DistributedTestCase{
WANTestBase test = new WANTestBase(getTestMethodName());
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
- props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.setProperty(DistributionConfig.LOCATORS_NAME, "localhost[" + locPort
+ "]");
@@ -2789,7 +2789,7 @@ public class WANTestBase extends DistributedTestCase{
fail("Expected GatewayReciever Exception");
}
catch (GatewayReceiverException gRE){
- LogWriterSupport.getLogWriter().fine("KBKBKB : got the GatewayReceiverException", gRE);
+ LogWriterUtils.getLogWriter().fine("KBKBKB : got the GatewayReceiverException", gRE);
assertTrue(gRE.getMessage().contains("Failed to create server socket on"));
}
catch (IOException e) {
@@ -2807,7 +2807,7 @@ public class WANTestBase extends DistributedTestCase{
Properties gemFireProps = new Properties();
- gemFireProps.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ gemFireProps.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
gemFireProps.put(DistributionConfig.GATEWAY_SSL_ENABLED_NAME, String.valueOf(gatewaySslenabled));
gemFireProps.put(DistributionConfig.GATEWAY_SSL_PROTOCOLS_NAME, gatewaySslprotocols);
gemFireProps.put(DistributionConfig.GATEWAY_SSL_CIPHERS_NAME, gatewaySslciphers);
@@ -2824,7 +2824,7 @@ public class WANTestBase extends DistributedTestCase{
gemFireProps.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
gemFireProps.setProperty(DistributionConfig.LOCATORS_NAME, "localhost[" + locPort + "]");
- LogWriterSupport.getLogWriter().info("Starting cache ds with following properties \n" + gemFireProps);
+ LogWriterUtils.getLogWriter().info("Starting cache ds with following properties \n" + gemFireProps);
InternalDistributedSystem ds = test.getSystem(gemFireProps);
cache = CacheFactory.create(ds);
@@ -2978,7 +2978,7 @@ public class WANTestBase extends DistributedTestCase{
region = cache.createRegion(regionName, attrs);
region.registerInterest("ALL_KEYS");
assertNotNull(region);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Distributed Region " + regionName + " created Successfully :"
+ region.toString());
}
@@ -3228,7 +3228,7 @@ public class WANTestBase extends DistributedTestCase{
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
+ LogWriterUtils.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
}
return custKeyValues;
}
@@ -3254,7 +3254,7 @@ public class WANTestBase extends DistributedTestCase{
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
return orderKeyValues;
}
@@ -3277,7 +3277,7 @@ public class WANTestBase extends DistributedTestCase{
"putOrderPartitionedRegionUsingCustId : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + custid + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + custid + " : " + order + " }");
}
return orderKeyValues;
}
@@ -3304,7 +3304,7 @@ public class WANTestBase extends DistributedTestCase{
"updateOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
}
return orderKeyValues;
@@ -3327,7 +3327,7 @@ public class WANTestBase extends DistributedTestCase{
"updateOrderPartitionedRegionUsingCustId : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + custid + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + custid + " : " + order + " }");
}
return orderKeyValues;
}
@@ -3356,7 +3356,7 @@ public class WANTestBase extends DistributedTestCase{
"putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Shipment :- { " + shipmentId + " : " + shipment + " }");
}
}
@@ -3405,7 +3405,7 @@ public class WANTestBase extends DistributedTestCase{
"putShipmentPartitionedRegionUsingCustId : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Shipment :- { " + custid + " : " + shipment + " }");
+ LogWriterUtils.getLogWriter().info("Shipment :- { " + custid + " : " + shipment + " }");
}
return shipmentKeyValue;
}
@@ -3434,7 +3434,7 @@ public class WANTestBase extends DistributedTestCase{
"updateShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Shipment :- { " + shipmentId + " : " + shipment + " }");
}
}
@@ -3459,7 +3459,7 @@ public class WANTestBase extends DistributedTestCase{
"updateShipmentPartitionedRegionUsingCustId : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Shipment :- { " + custid + " : " + shipment + " }");
+ LogWriterUtils.getLogWriter().info("Shipment :- { " + custid + " : " + shipment + " }");
}
return shipmentKeyValue;
}
@@ -3884,7 +3884,7 @@ public class WANTestBase extends DistributedTestCase{
for (int bucketId : bucketIds) {
List<GatewaySenderEventImpl> eventsForBucket = bucketToEventsMap
.get(bucketId);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Events for bucket: " + bucketId + " is " + eventsForBucket);
assertNotNull(eventsForBucket);
for (int i = 0; i < batchSize; i++) {
@@ -3906,7 +3906,7 @@ public class WANTestBase extends DistributedTestCase{
final Map eventsMap = ((MyAsyncEventListener)theListener).getEventsMap();
assertNotNull(eventsMap);
- LogWriterSupport.getLogWriter().info("The events map size is " + eventsMap.size());
+ LogWriterUtils.getLogWriter().info("The events map size is " + eventsMap.size());
return eventsMap.size();
}
@@ -3942,7 +3942,7 @@ public class WANTestBase extends DistributedTestCase{
};
Wait.waitForCriterion(wc, 200000, 500, true);
for(int i = 0 ; i < regionSize; i++){
- LogWriterSupport.getLogWriter().info("For Key : Key_"+i + " : Values : " + r.get("Key_" + i));
+ LogWriterUtils.getLogWriter().info("For Key : Key_"+i + " : Values : " + r.get("Key_" + i));
assertEquals(new SimpleClass(i, (byte)i), r.get("Key_" + i));
}
}
@@ -3964,7 +3964,7 @@ public class WANTestBase extends DistributedTestCase{
};
Wait.waitForCriterion(wc, 200000, 500, true);
for(int i = 0 ; i < regionSize; i++){
- LogWriterSupport.getLogWriter().info("For Key : Key_"+i + " : Values : " + r.get("Key_" + i));
+ LogWriterUtils.getLogWriter().info("For Key : Key_"+i + " : Values : " + r.get("Key_" + i));
assertEquals(new SimpleClass1(false, (short) i, "" + i, i,"" +i ,""+ i,i, i), r.get("Key_" + i));
}
}
@@ -4047,7 +4047,7 @@ public class WANTestBase extends DistributedTestCase{
public boolean done() {
for(Object key: keyValues.keySet()) {
if (!r.get(key).equals(keyValues.get(key))) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"The values are for key " + " " + key + " " + r.get(key)
+ " in the map " + keyValues.get(key));
return false;
@@ -4209,7 +4209,7 @@ public class WANTestBase extends DistributedTestCase{
}
}
if (sender.isPrimary()) {
- LogWriterSupport.getLogWriter().info("Gateway sender is killed by a test");
+ LogWriterUtils.getLogWriter().info("Gateway sender is killed by a test");
cache.getDistributedSystem().disconnect();
return Boolean.TRUE;
}
@@ -4231,7 +4231,7 @@ public class WANTestBase extends DistributedTestCase{
}
}
if (queue.isPrimary()) {
- LogWriterSupport.getLogWriter().info("AsyncEventQueue is killed by a test");
+ LogWriterUtils.getLogWriter().info("AsyncEventQueue is killed by a test");
cache.getDistributedSystem().disconnect();
return Boolean.TRUE;
}
@@ -4239,10 +4239,10 @@ public class WANTestBase extends DistributedTestCase{
}
public static void killSender(){
- LogWriterSupport.getLogWriter().info("Gateway sender is going to be killed by a test");
+ LogWriterUtils.getLogWriter().info("Gateway sender is going to be killed by a test");
cache.close();
cache.getDistributedSystem().disconnect();
- LogWriterSupport.getLogWriter().info("Gateway sender is killed by a test");
+ LogWriterUtils.getLogWriter().info("Gateway sender is killed by a test");
}
static void waitForSitesToUpdate() {
@@ -4561,7 +4561,7 @@ public class WANTestBase extends DistributedTestCase{
WaitCriterion wc = new WaitCriterion() {
public boolean done() {
if (bucket.keySet().size() == 0) {
- LogWriterSupport.getLogWriter().info("Bucket " + bucket.getId() + " is empty");
+ LogWriterUtils.getLogWriter().info("Bucket " + bucket.getId() + " is empty");
return true;
}
return false;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderDUnitTest.java
index f1ed833..dbdff58 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderDUnitTest.java
@@ -26,7 +26,7 @@ import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.internal.cache.wan.parallel.ConcurrentParallelGatewaySenderEventProcessor;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import java.net.SocketException;
@@ -512,7 +512,7 @@ public class ConcurrentParallelGatewaySenderDUnitTest extends WANTestBase {
Integer regionSize =
(Integer) vm2.invoke(WANTestBase.class, "getRegionSize", new Object[] {getTestMethodName() + "_PR" });
- LogWriterSupport.getLogWriter().info("Region size on remote is: " + regionSize);
+ LogWriterUtils.getLogWriter().info("Region size on remote is: " + regionSize);
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_1_DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_1_DUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_1_DUnitTest.java
index 5d17017..1ed4d1b 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_1_DUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_1_DUnitTest.java
@@ -20,7 +20,7 @@ import com.gemstone.gemfire.cache.wan.GatewaySender.OrderPolicy;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
/**
@@ -586,12 +586,12 @@ public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTes
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now started");
+ LogWriterUtils.getLogWriter().info("All the senders are now started");
//FIRST RUN: now, the senders are started. So, do some of the puts
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 200 });
- LogWriterSupport.getLogWriter().info("Done few puts");
+ LogWriterUtils.getLogWriter().info("Done few puts");
//now, stop all of the senders
vm4.invoke(WANTestBase.class, "stopSender", new Object[] { "ln" });
@@ -599,28 +599,28 @@ public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTes
vm6.invoke(WANTestBase.class, "stopSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "stopSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are stopped");
+ LogWriterUtils.getLogWriter().info("All the senders are stopped");
Wait.pause(2000);
//SECOND RUN: do some of the puts after the senders are stopped
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 1000 });
- LogWriterSupport.getLogWriter().info("Done some more puts in second run");
+ LogWriterUtils.getLogWriter().info("Done some more puts in second run");
//Region size on remote site should remain same and below the number of puts done in the FIRST RUN
vm2.invoke(WANTestBase.class, "validateRegionSizeRemainsSame", new Object[] {getTestMethodName() + "_PR", 200 });
//SECOND RUN: start async puts on region
AsyncInvocation async = vm4.invokeAsync(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 5000 });
- LogWriterSupport.getLogWriter().info("Started high number of puts by async thread");
+ LogWriterUtils.getLogWriter().info("Started high number of puts by async thread");
- LogWriterSupport.getLogWriter().info("Starting the senders at the same time");
+ LogWriterUtils.getLogWriter().info("Starting the senders at the same time");
//when puts are happening by another thread, start the senders
vm4.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are started");
+ LogWriterUtils.getLogWriter().info("All the senders are started");
async.join();
@@ -752,7 +752,7 @@ public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTes
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created cache on local site");
+ LogWriterUtils.getLogWriter().info("Created cache on local site");
vm4.invoke(WANTestBase.class, "createConcurrentSender", new Object[] { "ln", 2,
true, 100, 10, false, false, null, true, 5, OrderPolicy.KEY });
@@ -763,7 +763,7 @@ public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTes
vm7.invoke(WANTestBase.class, "createConcurrentSender", new Object[] { "ln", 2,
true, 100, 10, false, false, null, true, 5, OrderPolicy.KEY });
- LogWriterSupport.getLogWriter().info("Created senders on local site");
+ LogWriterUtils.getLogWriter().info("Created senders on local site");
vm4.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName() + "_PR", "ln", 1, 100, isOffHeap() });
@@ -774,16 +774,16 @@ public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTes
vm7.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName() + "_PR", "ln", 1, 100, isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created PRs on local site");
+ LogWriterUtils.getLogWriter().info("Created PRs on local site");
vm2.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName() + "_PR", null, 1, 100, isOffHeap() });
vm3.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName() + "_PR", null, 1, 100, isOffHeap() });
- LogWriterSupport.getLogWriter().info("Created PRs on remote site");
+ LogWriterUtils.getLogWriter().info("Created PRs on remote site");
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 1000 });
- LogWriterSupport.getLogWriter().info("Done 1000 puts on local site");
+ LogWriterUtils.getLogWriter().info("Done 1000 puts on local site");
//Since puts are already done on userPR, it will have the buckets created.
//During sender start, it will wait until those buckets are created for shadowPR as well.
@@ -799,16 +799,16 @@ public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTes
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Started senders on local site");
+ LogWriterUtils.getLogWriter().info("Started senders on local site");
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 5000 });
- LogWriterSupport.getLogWriter().info("Done 5000 puts on local site");
+ LogWriterUtils.getLogWriter().info("Done 5000 puts on local site");
vm4.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Paused senders on local site");
+ LogWriterUtils.getLogWriter().info("Paused senders on local site");
vm4.invoke(WANTestBase.class, "verifySenderPausedState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "verifySenderPausedState", new Object[] { "ln" });
@@ -817,13 +817,13 @@ public class ConcurrentParallelGatewaySenderOperation_1_DUnitTest extends WANTes
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "doPuts",
new Object[] { getTestMethodName() + "_PR", 1000 });
- LogWriterSupport.getLogWriter().info("Started 1000 async puts on local site");
+ LogWriterUtils.getLogWriter().info("Started 1000 async puts on local site");
vm4.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Resumed senders on local site");
+ LogWriterUtils.getLogWriter().info("Resumed senders on local site");
vm4.invoke(WANTestBase.class, "verifySenderResumedState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "verifySenderResumedState", new Object[] { "ln" });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_2_DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_2_DUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_2_DUnitTest.java
index 8f92810..4e74822 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_2_DUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentParallelGatewaySenderOperation_2_DUnitTest.java
@@ -24,7 +24,7 @@ import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -242,7 +242,7 @@ public class ConcurrentParallelGatewaySenderOperation_2_DUnitTest extends WANTes
try {
vm7.invoke(() -> createCache_INFINITE_MAXIMUM_SHUTDOWN_WAIT_TIME(lnPort));
- LogWriterSupport.getLogWriter().info("Created cache on local site");
+ LogWriterUtils.getLogWriter().info("Created cache on local site");
vm7.invoke(() -> createConcurrentSender("ln1", 2, true, 100, 10, false, false, null, true, 5, OrderPolicy.KEY));
vm7.invoke(() -> createConcurrentSender("ln2", 3, true, 100, 10, false, false, null, true, 5, OrderPolicy.KEY));
@@ -255,7 +255,7 @@ public class ConcurrentParallelGatewaySenderOperation_2_DUnitTest extends WANTes
String regionName = getTestMethodName() + "_PR";
vm7.invoke(() -> createPartitionedRegion(regionName, "ln1,ln2,ln3", 1, 10, isOffHeap()));
- LogWriterSupport.getLogWriter().info("Created PRs on local site");
+ LogWriterUtils.getLogWriter().info("Created PRs on local site");
vm4.invoke(() -> createPartitionedRegion(regionName, null, 1, 10, isOffHeap()));
vm5.invoke(() -> createPartitionedRegion(regionName, null, 1, 10, isOffHeap()));
@@ -380,7 +380,7 @@ public class ConcurrentParallelGatewaySenderOperation_2_DUnitTest extends WANTes
createAndStartSenderWithCustomerOrderShipmentRegion(vm4, lnPort, 5, true);
createAndStartSenderWithCustomerOrderShipmentRegion(vm5, lnPort, 5, true);
- LogWriterSupport.getLogWriter().info("Created PRs on local site");
+ LogWriterUtils.getLogWriter().info("Created PRs on local site");
vm2.invoke(() -> createCustomerOrderShipmentPartitionedRegion(null, null, 1, 100, isOffHeap()));
@@ -418,7 +418,7 @@ public class ConcurrentParallelGatewaySenderOperation_2_DUnitTest extends WANTes
createAndStartSenderWithCustomerOrderShipmentRegion(vm4, lnPort, 6, true);
createAndStartSenderWithCustomerOrderShipmentRegion(vm5, lnPort, 6, true);
- LogWriterSupport.getLogWriter().info("Created PRs on local site");
+ LogWriterUtils.getLogWriter().info("Created PRs on local site");
vm2.invoke(WANTestBase.class,
"createCustomerOrderShipmentPartitionedRegion", new Object[] { null,
@@ -486,7 +486,7 @@ public class ConcurrentParallelGatewaySenderOperation_2_DUnitTest extends WANTes
vm.invoke(() -> pauseSender("ln"));
}
vm.invoke(() -> createPartitionedRegion(getTestMethodName() + "_PR", "ln", 1, 10, isOffHeap()));
- LogWriterSupport.getLogWriter().info("Created PRs on local site");
+ LogWriterUtils.getLogWriter().info("Created PRs on local site");
}
protected void createReceiverAndDoPutsInPausedSender(int port) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentWANPropogation_1_DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentWANPropogation_1_DUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentWANPropogation_1_DUnitTest.java
index 1248417..c6123be 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentWANPropogation_1_DUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/concurrent/ConcurrentWANPropogation_1_DUnitTest.java
@@ -25,7 +25,7 @@ import com.gemstone.gemfire.internal.cache.wan.BatchException70;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
/**
* All the test cases are similar to SerialWANPropogationDUnitTest except that
@@ -202,7 +202,7 @@ public class ConcurrentWANPropogation_1_DUnitTest extends WANTestBase {
Integer regionSize =
(Integer) vm2.invoke(WANTestBase.class, "getRegionSize", new Object[] {getTestMethodName() + "_RR" });
- LogWriterSupport.getLogWriter().info("Region size on remote is: " + regionSize);
+ LogWriterUtils.getLogWriter().info("Region size on remote is: " + regionSize);
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/disttx/DistTXWANDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/disttx/DistTXWANDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/disttx/DistTXWANDUnitTest.java
index b5d15fe..86018c6 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/disttx/DistTXWANDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/disttx/DistTXWANDUnitTest.java
@@ -22,7 +22,7 @@ import com.gemstone.gemfire.internal.cache.ForceReattemptException;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
public class DistTXWANDUnitTest extends WANTestBase {
@@ -39,7 +39,7 @@ public class DistTXWANDUnitTest extends WANTestBase {
Invoke.invokeInEveryVM(new SerializableCallable() {
@Override
public Object call() throws Exception {
- System.setProperty("gemfire.log-level", LogWriterSupport.getDUnitLogLevel());
+ System.setProperty("gemfire.log-level", LogWriterUtils.getDUnitLogLevel());
return null;
}
});
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/CommonParallelGatewaySenderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/CommonParallelGatewaySenderDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/CommonParallelGatewaySenderDUnitTest.java
index 1cd6846..6be5b97 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/CommonParallelGatewaySenderDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/CommonParallelGatewaySenderDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.internal.cache.wan.parallel.ParallelGatewaySenderQue
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -287,7 +287,7 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
//create PR on remote site
vm2.invoke(WANTestBase.class, "createPartitionedRegion", new Object[] {
@@ -343,7 +343,7 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName()+"PR1", 3000 });
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName()+"PR2", 5000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -352,7 +352,7 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
vm6.invoke(WANTestBase.class, "killSender", new Object[] {});
vm7.invoke(WANTestBase.class, "killSender", new Object[] {});
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -360,7 +360,7 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -372,7 +372,7 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createPartitionedRegion", new Object[] {
getTestMethodName()+"PR1", "ln", 1, 100, isOffHeap() });
@@ -412,7 +412,7 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
fail();
}
- LogWriterSupport.getLogWriter().info("Created back the partitioned regions");
+ LogWriterUtils.getLogWriter().info("Created back the partitioned regions");
//start the senders in async mode. This will ensure that the
//node of shadow PR that went down last will come up first
@@ -421,14 +421,14 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -463,7 +463,7 @@ public class CommonParallelGatewaySenderDUnitTest extends WANTestBase {
WaitCriterion wc = new WaitCriterion() {
public boolean done() {
if (bucket.keySet().size() == 0) {
- LogWriterSupport.getLogWriter().info("Bucket " + bucket.getId() + " is empty");
+ LogWriterUtils.getLogWriter().info("Bucket " + bucket.getId() + " is empty");
return true;
}
return false;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/NewWANConcurrencyCheckForDestroyDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/NewWANConcurrencyCheckForDestroyDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/NewWANConcurrencyCheckForDestroyDUnitTest.java
index eab7004..4b5fb00 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/NewWANConcurrencyCheckForDestroyDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/NewWANConcurrencyCheckForDestroyDUnitTest.java
@@ -31,7 +31,7 @@ import com.gemstone.gemfire.internal.cache.Token.Tombstone;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
/**
@@ -82,7 +82,7 @@ public class NewWANConcurrencyCheckForDestroyDUnitTest extends WANTestBase {
"createFirstRemoteLocator", new Object[] { 3, lnPort });
Integer tkRecPort = (Integer) vm5.invoke(WANTestBase.class, "createReceiver", new Object[] { tkPort });
- LogWriterSupport.getLogWriter().info("Created locators and receivers in 3 distributed systems");
+ LogWriterUtils.getLogWriter().info("Created locators and receivers in 3 distributed systems");
//Site 1
vm1.invoke(WANTestBase.class, "createSender", new Object[] { "ln1", 2,
@@ -162,7 +162,7 @@ public class NewWANConcurrencyCheckForDestroyDUnitTest extends WANTestBase {
"createFirstRemoteLocator", new Object[] { 2, lnPort });
Integer nyRecPort = (Integer) vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
- LogWriterSupport.getLogWriter().info("Created locators and receivers in 2 distributed systems");
+ LogWriterUtils.getLogWriter().info("Created locators and receivers in 2 distributed systems");
//Site 1
vm1.invoke(WANTestBase.class, "createSender", new Object[] { "ln1", 2,
@@ -267,7 +267,7 @@ public void testPutAllEventSequenceOnSerialGatewaySenderWithPR() {
"createFirstRemoteLocator", new Object[] { 2, lnPort });
Integer nyRecPort = (Integer) vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
- LogWriterSupport.getLogWriter().info("Created locators and receivers in 2 distributed systems");
+ LogWriterUtils.getLogWriter().info("Created locators and receivers in 2 distributed systems");
//Site 1
vm1.invoke(WANTestBase.class, "createSender", new Object[] { "ln1", 2,
@@ -374,7 +374,7 @@ public void testPutAllEventSequenceOnSerialGatewaySenderWithPR() {
"createFirstRemoteLocator", new Object[] { 2, lnPort });
Integer nyRecPort = (Integer) vm3.invoke(WANTestBase.class, "createReceiver", new Object[] { nyPort });
- LogWriterSupport.getLogWriter().info("Created locators and receivers in 2 distributed systems");
+ LogWriterUtils.getLogWriter().info("Created locators and receivers in 2 distributed systems");
//Site 1
vm1.invoke(WANTestBase.class, "createSender", new Object[] { "ln1", 2,
@@ -483,7 +483,7 @@ public void testPutAllEventSequenceOnSerialGatewaySenderWithPR() {
re = ((NonTXEntry)entry).getRegionEntry();
}
if (re != null) {
- LogWriterSupport.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region));
+ LogWriterUtils.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region));
VersionTag tag = re.getVersionStamp().asVersionTag();
return tag.getVersionTimeStamp();
@@ -503,7 +503,7 @@ public void testPutAllEventSequenceOnSerialGatewaySenderWithPR() {
Region.Entry entry = ((LocalRegion)region).getEntry("testKey", /*null,*/ true);
RegionEntry re = ((EntrySnapshot)entry).getRegionEntry();
- LogWriterSupport.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region));
+ LogWriterUtils.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region));
assertTrue(re.getValueInVM((LocalRegion) region) instanceof Tombstone);
VersionTag tag = re.getVersionStamp().asVersionTag();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPersistenceDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPersistenceDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPersistenceDUnitTest.java
index 4d52811..b990a22 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPersistenceDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPersistenceDUnitTest.java
@@ -22,7 +22,7 @@ import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBase {
@@ -77,7 +77,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", isOffHeap() });
@@ -105,7 +105,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_RR", 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -121,7 +121,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
exp1.remove();
}
*/
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -129,7 +129,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -141,7 +141,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createReplicatedRegion", new Object[] {
@@ -169,14 +169,14 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
getTestMethodName() + "_RR", 3000 });
@@ -234,7 +234,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
vm4.invoke(WANTestBase.class,
"createReplicatedRegion", new Object[] { getTestMethodName() + "_RR", "ln",
@@ -266,7 +266,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_RR", 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -288,7 +288,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -300,7 +300,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createReplicatedRegion", new Object[] {
@@ -329,14 +329,14 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -414,7 +414,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
String diskStore4 = (String) vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, null, true });
- LogWriterSupport.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
+ LogWriterUtils.getLogWriter().info("The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + "," + diskStore4);
vm4.invoke(WANTestBase.class, "createReplicatedRegion", new Object[] {
getTestMethodName() + "_RR", "ln", Scope.DISTRIBUTED_ACK,
@@ -456,7 +456,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
//start puts in region on local site
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_RR", 3000 });
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 3000 });
- LogWriterSupport.getLogWriter().info("Completed puts in the region");
+ LogWriterUtils.getLogWriter().info("Completed puts in the region");
//--------------------close and rebuild local site -------------------------------------------------
//kill the senders
@@ -472,7 +472,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
exp1.remove();
}
*/
- LogWriterSupport.getLogWriter().info("Killed all the senders.");
+ LogWriterUtils.getLogWriter().info("Killed all the senders.");
//restart the vm
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
@@ -480,7 +480,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm6.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm7.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
//create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore",
@@ -492,7 +492,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm7.invoke(WANTestBase.class, "createSenderWithDiskStore",
new Object[] { "ln", 2, true, 100, 10, false, true, null, diskStore4, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
//create PR on local site
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "createReplicatedRegion", new Object[] {
@@ -545,14 +545,14 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
//wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "waitForSenderRunningState", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are now running...");
+ LogWriterUtils.getLogWriter().info("All the senders are now running...");
//----------------------------------------------------------------------------------------------------
@@ -642,7 +642,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
"createSenderWithDiskStore", new Object[] { "ln", 2, true, 100, 10,
false, true, null, null, true });
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"The DS are: " + diskStore1 + "," + diskStore2 + "," + diskStore3 + ","
+ diskStore4);
@@ -704,7 +704,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm4.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
vm5.invoke(WANTestBase.class, "createCache", new Object[] { lnPort });
- LogWriterSupport.getLogWriter().info("Created back the cache");
+ LogWriterUtils.getLogWriter().info("Created back the cache");
// create senders with disk store
vm4.invoke(WANTestBase.class, "createSenderWithDiskStore", new Object[] {
@@ -712,7 +712,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm5.invoke(WANTestBase.class, "createSenderWithDiskStore", new Object[] {
"ln", 2, true, 100, 10, false, true, null, diskStore2, true });
- LogWriterSupport.getLogWriter().info("Created the senders back from the disk store.");
+ LogWriterUtils.getLogWriter().info("Created the senders back from the disk store.");
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class,
"createReplicatedRegion",
@@ -737,7 +737,7 @@ public class ReplicatedRegion_ParallelWANPersistenceDUnitTest extends WANTestBas
vm4.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Waiting for senders running.");
+ LogWriterUtils.getLogWriter().info("Waiting for senders running.");
// wait for senders running
vm4.invoke(WANTestBase.class, "waitForSenderRunningState",
new Object[] { "ln" });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPropogationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPropogationDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPropogationDUnitTest.java
index 18b1109..6c451a3 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPropogationDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ReplicatedRegion_ParallelWANPropogationDUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
public class ReplicatedRegion_ParallelWANPropogationDUnitTest extends WANTestBase{
@@ -954,7 +954,7 @@ public class ReplicatedRegion_ParallelWANPropogationDUnitTest extends WANTestBas
Integer size = (Integer)vm4.invoke(WANTestBase.class,
"getQueueContentSize", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("The size of the queue is in vm4 " + size);
+ LogWriterUtils.getLogWriter().info("The size of the queue is in vm4 " + size);
vm4.invoke(WANTestBase.class,
@@ -962,7 +962,7 @@ public class ReplicatedRegion_ParallelWANPropogationDUnitTest extends WANTestBas
size = (Integer)vm4.invoke(WANTestBase.class,
"getQueueContentSize", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("The size of the queue is in vm4 " + size);
+ LogWriterUtils.getLogWriter().info("The size of the queue is in vm4 " + size);
vm2.invoke(WANTestBase.class, "validateRegionSize", new Object[] {
getTestMethodName() + "_RR", 1000 });
@@ -1076,7 +1076,7 @@ public class ReplicatedRegion_ParallelWANPropogationDUnitTest extends WANTestBas
Region r = cache.getRegion(Region.SEPARATOR + regionName);
assertNotNull(r);
for (long i = 0; i < numPuts; i++) {
- LogWriterSupport.getLogWriter().info("Put : key : " + i);
+ LogWriterUtils.getLogWriter().info("Put : key : " + i);
r.put(i, "0_" + i);
}
} finally {
@@ -1095,7 +1095,7 @@ public class ReplicatedRegion_ParallelWANPropogationDUnitTest extends WANTestBas
Region r = cache.getRegion(Region.SEPARATOR + regionName);
assertNotNull(r);
for (long i = 0; i < numPuts; i++) {
- LogWriterSupport.getLogWriter().info("Put : key : " + i);
+ LogWriterUtils.getLogWriter().info("Put : key : " + i);
r.put(i, "1_" + i);
}
} finally {
@@ -1113,7 +1113,7 @@ public class ReplicatedRegion_ParallelWANPropogationDUnitTest extends WANTestBas
Region r = cache.getRegion(Region.SEPARATOR + regionName);
assertNotNull(r);
for (long i = 0; i < numPuts; i++) {
- LogWriterSupport.getLogWriter().info("Put : key : " + i);
+ LogWriterUtils.getLogWriter().info("Put : key : " + i);
r.put(i, "2_" + i);
}
} finally {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ShutdownAllPersistentGatewaySenderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ShutdownAllPersistentGatewaySenderDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ShutdownAllPersistentGatewaySenderDUnitTest.java
index 4425c4e..c5100f9 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ShutdownAllPersistentGatewaySenderDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/ShutdownAllPersistentGatewaySenderDUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -117,7 +117,7 @@ public class ShutdownAllPersistentGatewaySenderDUnitTest extends WANTestBase {
future.join(MAX_WAIT);
// now restart vm1 with gatewayHub
- LogWriterSupport.getLogWriter().info("restart in VM2");
+ LogWriterUtils.getLogWriter().info("restart in VM2");
vm2.invoke(WANTestBase.class, "createCache", new Object[] { nyPort });
vm3.invoke(WANTestBase.class, "createCache", new Object[] { nyPort });
AsyncInvocation vm3_future = vm3.invokeAsync(WANTestBase.class,
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WANLocatorServerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WANLocatorServerDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WANLocatorServerDUnitTest.java
index 9798739..d239cf4 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WANLocatorServerDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WANLocatorServerDUnitTest.java
@@ -37,7 +37,7 @@ import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class WANLocatorServerDUnitTest extends WANTestBase {
@@ -151,7 +151,7 @@ public class WANLocatorServerDUnitTest extends WANTestBase {
fail("Test " + test.getName() + " failed to start CacheServer on port "
+ port);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Server Started on port : " + port + " : server : " + server);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WanAutoDiscoveryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WanAutoDiscoveryDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WanAutoDiscoveryDUnitTest.java
index 6a8acb2..459b718 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WanAutoDiscoveryDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/misc/WanAutoDiscoveryDUnitTest.java
@@ -30,7 +30,7 @@ import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class WanAutoDiscoveryDUnitTest extends WANTestBase {
@@ -458,7 +458,7 @@ public class WanAutoDiscoveryDUnitTest extends WANTestBase {
Assert.fail("Could not get end time", e);
}
- LogWriterSupport.getLogWriter().info("Time taken for all 9 locators discovery in 3 sites: " + (endTime.longValue() - startTime));
+ LogWriterUtils.getLogWriter().info("Time taken for all 9 locators discovery in 3 sites: " + (endTime.longValue() - startTime));
vm0.invoke(WANTestBase.class, "checkAllSiteMetaDataFor3Sites",
new Object[] { dsVsPort });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderOperationsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderOperationsDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderOperationsDUnitTest.java
index 3faae76..8dbf2f7 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderOperationsDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderOperationsDUnitTest.java
@@ -24,7 +24,7 @@ import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.RMIException;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -318,38 +318,38 @@ public class ParallelGatewaySenderOperationsDUnitTest extends WANTestBase {
//make sure all the senders are running before doing any puts
waitForSendersRunning();
- LogWriterSupport.getLogWriter().info("All the senders are now started");
+ LogWriterUtils.getLogWriter().info("All the senders are now started");
//FIRST RUN: now, the senders are started. So, do some of the puts
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 200 });
- LogWriterSupport.getLogWriter().info("Done few puts");
+ LogWriterUtils.getLogWriter().info("Done few puts");
//now, stop all of the senders
stopSenders();
- LogWriterSupport.getLogWriter().info("All the senders are stopped");
+ LogWriterUtils.getLogWriter().info("All the senders are stopped");
Wait.pause(2000);
//SECOND RUN: do some of the puts after the senders are stopped
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 1000 });
- LogWriterSupport.getLogWriter().info("Done some more puts in second run");
+ LogWriterUtils.getLogWriter().info("Done some more puts in second run");
//Region size on remote site should remain same and below the number of puts done in the FIRST RUN
vm2.invoke(WANTestBase.class, "validateRegionSizeRemainsSame", new Object[] {getTestMethodName() + "_PR", 200 });
//SECOND RUN: start async puts on region
AsyncInvocation async = vm4.invokeAsync(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 5000 });
- LogWriterSupport.getLogWriter().info("Started high number of puts by async thread");
+ LogWriterUtils.getLogWriter().info("Started high number of puts by async thread");
- LogWriterSupport.getLogWriter().info("Starting the senders at the same time");
+ LogWriterUtils.getLogWriter().info("Starting the senders at the same time");
//when puts are happening by another thread, start the senders
vm4.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm5.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm6.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
vm7.invokeAsync(WANTestBase.class, "startSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("All the senders are started");
+ LogWriterUtils.getLogWriter().info("All the senders are started");
async.join();
@@ -424,7 +424,7 @@ public class ParallelGatewaySenderOperationsDUnitTest extends WANTestBase {
createSendersReceiversAndPartitionedRegion(lnPort, nyPort, false, true);
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 1000 });
- LogWriterSupport.getLogWriter().info("Done 1000 puts on local site");
+ LogWriterUtils.getLogWriter().info("Done 1000 puts on local site");
//Since puts are already done on userPR, it will have the buckets created.
//During sender start, it will wait until those buckets are created for shadowPR as well.
@@ -437,16 +437,16 @@ public class ParallelGatewaySenderOperationsDUnitTest extends WANTestBase {
waitForSendersRunning();
- LogWriterSupport.getLogWriter().info("Started senders on local site");
+ LogWriterUtils.getLogWriter().info("Started senders on local site");
vm4.invoke(WANTestBase.class, "doPuts", new Object[] { getTestMethodName() + "_PR", 5000 });
- LogWriterSupport.getLogWriter().info("Done 5000 puts on local site");
+ LogWriterUtils.getLogWriter().info("Done 5000 puts on local site");
vm4.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "pauseSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Paused senders on local site");
+ LogWriterUtils.getLogWriter().info("Paused senders on local site");
vm4.invoke(WANTestBase.class, "verifySenderPausedState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "verifySenderPausedState", new Object[] { "ln" });
@@ -455,13 +455,13 @@ public class ParallelGatewaySenderOperationsDUnitTest extends WANTestBase {
AsyncInvocation inv1 = vm4.invokeAsync(WANTestBase.class, "doPuts",
new Object[] { getTestMethodName() + "_PR", 1000 });
- LogWriterSupport.getLogWriter().info("Started 1000 async puts on local site");
+ LogWriterUtils.getLogWriter().info("Started 1000 async puts on local site");
vm4.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
vm6.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
vm7.invoke(WANTestBase.class, "resumeSender", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Resumed senders on local site");
+ LogWriterUtils.getLogWriter().info("Resumed senders on local site");
vm4.invoke(WANTestBase.class, "verifySenderResumedState", new Object[] { "ln" });
vm5.invoke(WANTestBase.class, "verifySenderResumedState", new Object[] { "ln" });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueOverflowDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueOverflowDUnitTest.java b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueOverflowDUnitTest.java
index c90d696..ee38f5a 100644
--- a/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueOverflowDUnitTest.java
+++ b/gemfire-wan/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueOverflowDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.internal.cache.RegionQueue;
import com.gemstone.gemfire.internal.cache.wan.AbstractGatewaySender;
import com.gemstone.gemfire.internal.cache.wan.WANTestBase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -124,8 +124,8 @@ public class ParallelGatewaySenderQueueOverflowDUnitTest extends WANTestBase {
long numMemVm6 = (Long) vm6.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
long numMemVm7 = (Long) vm7.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
- LogWriterSupport.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
+ LogWriterUtils.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
+ LogWriterUtils.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
long totalOverflown = numOvVm4 + numOvVm5 + numOvVm6 + numOvVm7;
//considering a memory limit of 40 MB, maximum of 40 events can be in memory. Rest should be on disk.
@@ -212,8 +212,8 @@ public class ParallelGatewaySenderQueueOverflowDUnitTest extends WANTestBase {
long numMemVm6 = (Long) vm6.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
long numMemVm7 = (Long) vm7.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
- LogWriterSupport.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
+ LogWriterUtils.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
+ LogWriterUtils.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
long totalOverflown = numOvVm4 + numOvVm5 + numOvVm6 + numOvVm7;
//considering a memory limit of 40 MB, maximum of 40 events can be in memory. Rest should be on disk.
@@ -301,8 +301,8 @@ public class ParallelGatewaySenderQueueOverflowDUnitTest extends WANTestBase {
long numMemVm6 = (Long) vm6.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
long numMemVm7 = (Long) vm7.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
- LogWriterSupport.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
+ LogWriterUtils.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
+ LogWriterUtils.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
long totalOverflown = numOvVm4 + numOvVm5 + numOvVm6 + numOvVm7;
//considering a memory limit of 40 MB, maximum of 40 events can be in memory. Rest should be on disk.
@@ -390,8 +390,8 @@ public class ParallelGatewaySenderQueueOverflowDUnitTest extends WANTestBase {
long numMemVm6 = (Long) vm6.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
long numMemVm7 = (Long) vm7.invoke(WANTestBase.class, "getNumberOfEntriesInVM", new Object[] { "ln" });
- LogWriterSupport.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
- LogWriterSupport.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
+ LogWriterUtils.getLogWriter().info("Entries overflown to disk: " + numOvVm4 + "," + numOvVm5 + "," + numOvVm6 + "," + numOvVm7);
+ LogWriterUtils.getLogWriter().info("Entries in VM: " + numMemVm4 + "," + numMemVm5 + "," + numMemVm6 + "," + numMemVm7);
long totalOverflown = numOvVm4 + numOvVm5 + numOvVm6 + numOvVm7;
//all 30 (considering redundant copies) events should accommodate in 40 MB space given to 4 senders
[11/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java
index b215055..841c960 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.management.internal.cli.result.CompositeResultData.S
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -87,7 +87,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
cmdResult.resetToFirstLine();
if (cmdResult != null) {
String cmdResultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testGCForGroup cmdResultStr=" + cmdResultStr + "; cmdResult=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testGCForGroup cmdResultStr=" + cmdResultStr + "; cmdResult=" + cmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
if (cmdResult.getType().equals(ResultData.TYPE_TABULAR)) {
TabularResultData table = (TabularResultData) cmdResult.getResultData();
@@ -115,7 +115,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
cmdResult.resetToFirstLine();
if (cmdResult != null) {
String cmdResultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testGCForMemberID cmdResultStr=" + cmdResultStr);
+ LogWriterUtils.getLogWriter().info("testGCForMemberID cmdResultStr=" + cmdResultStr);
assertEquals(Result.Status.OK, cmdResult.getStatus());
if (cmdResult.getType().equals(ResultData.TYPE_TABULAR)) {
TabularResultData table = (TabularResultData) cmdResult.getResultData();
@@ -141,7 +141,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
if (cmdResult != null) {
String log = commandResultToString(cmdResult);
assertNotNull(log);
- LogWriterSupport.getLogWriter().info("Show Log is" + log);
+ LogWriterUtils.getLogWriter().info("Show Log is" + log);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testShowLog failed as did not get CommandResult");
@@ -163,7 +163,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
if (cmdResult != null) {
String log = commandResultToString(cmdResult);
assertNotNull(log);
- LogWriterSupport.getLogWriter().info("Show Log is" + log);
+ LogWriterUtils.getLogWriter().info("Show Log is" + log);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testShowLog failed as did not get CommandResult");
@@ -180,7 +180,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
cmdResult.resetToFirstLine();
if (cmdResult != null) {
String cmdResultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testGCForEntireCluster cmdResultStr=" + cmdResultStr + "; cmdResult=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testGCForEntireCluster cmdResultStr=" + cmdResultStr + "; cmdResult=" + cmdResult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
if (cmdResult.getType().equals(ResultData.TYPE_TABULAR)) {
TabularResultData table = (TabularResultData) cmdResult.getResultData();
@@ -237,7 +237,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
if (cmdResult != null) {
String cmdResultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testShutDownWithoutTimeout cmdResultStr=" + cmdResultStr);
+ LogWriterUtils.getLogWriter().info("testShutDownWithoutTimeout cmdResultStr=" + cmdResultStr);
}
verifyShutDown();
@@ -271,7 +271,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
if (cmdResult != null) {
String cmdResultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testShutDownWithTIMEOUT cmdResultStr=" + cmdResultStr);
+ LogWriterUtils.getLogWriter().info("testShutDownWithTIMEOUT cmdResultStr=" + cmdResultStr);
}
verifyShutDown();
@@ -308,7 +308,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
if (cmdResult != null) {
String cmdResultStr = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testShutDownForTIMEOUT cmdResultStr = " + cmdResultStr);
+ LogWriterUtils.getLogWriter().info("testShutDownForTIMEOUT cmdResultStr = " + cmdResultStr);
CommandResult result = (CommandResult) ResultBuilder.createInfoResult(CliStrings.SHUTDOWN_TIMEDOUT);
String expectedResult = commandResultToString(result);
assertEquals(expectedResult, cmdResultStr);
@@ -419,7 +419,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
String commandString = CliStrings.CHANGE_LOGLEVEL + " --" + CliStrings.CHANGE_LOGLEVEL__LOGLEVEL + "=finer" + " --" + CliStrings.CHANGE_LOGLEVEL__MEMBER + "=" + serverName1 + "," + serverName2;
CommandResult commandResult = executeCommand(commandString);
- LogWriterSupport.getLogWriter().info("testChangeLogLevel commandResult=" + commandResult);
+ LogWriterUtils.getLogWriter().info("testChangeLogLevel commandResult=" + commandResult);
assertTrue(Status.OK.equals(commandResult.getStatus()));
CompositeResultData resultData = (CompositeResultData) commandResult.getResultData();
SectionResultData section = resultData.retrieveSection("section");
@@ -475,7 +475,7 @@ public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
String commandString = CliStrings.CHANGE_LOGLEVEL + " --" + CliStrings.CHANGE_LOGLEVEL__LOGLEVEL + "=finer" + " --" + CliStrings.CHANGE_LOGLEVEL__GROUPS + "=" + grp1 + "," + grp2;
CommandResult commandResult = executeCommand(commandString);
- LogWriterSupport.getLogWriter().info("testChangeLogLevelForGrps commandResult=" + commandResult);
+ LogWriterUtils.getLogWriter().info("testChangeLogLevelForGrps commandResult=" + commandResult);
assertTrue(Status.OK.equals(commandResult.getStatus()));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
index 4dfd6e4..c8928fa 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
@@ -25,7 +25,7 @@ import com.gemstone.gemfire.internal.logging.LogWriterImpl;
import com.gemstone.gemfire.management.cli.Result;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -96,11 +96,11 @@ public class MiscellaneousCommandsExportLogsPart1DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogs" + dir, null, null, logLevel, false, false, start,
end, 1);
- LogWriterSupport.getLogWriter().info("testExportLogs command result =" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogs command result =" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogs cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogs cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testExportLogs failed as did not get CommandResult");
@@ -125,11 +125,11 @@ public class MiscellaneousCommandsExportLogsPart1DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForMerge" + dir, null, null, logLevel, false, true,
start, end, 1);
- LogWriterSupport.getLogWriter().info("testExportLogsForMerge command=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForMerge command=" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogsForMerge cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForMerge cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java
index 582b562..3d462f4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java
@@ -25,7 +25,7 @@ import com.gemstone.gemfire.internal.logging.LogWriterImpl;
import com.gemstone.gemfire.management.cli.Result;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -97,11 +97,11 @@ public class MiscellaneousCommandsExportLogsPart2DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForLogLevel" + dir, null, null, logLevel, false,
false, start, end, 1);
- LogWriterSupport.getLogWriter().info("testExportLogsForLogLevel command=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForLogLevel command=" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogsForLogLevel cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForLogLevel cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testExportLogsForLogLevel failed as did not get CommandResult");
@@ -129,11 +129,11 @@ public class MiscellaneousCommandsExportLogsPart2DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForLogLevelWithUPTOLOGLEVEL" + dir, null, null,
logLevel, true, false, start, end, 1);
- LogWriterSupport.getLogWriter().info("testExportLogsForLogLevelWithUPTOLOGLEVEL command=" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForLogLevelWithUPTOLOGLEVEL command=" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogsForLogLevelWithUPTOLOGLEVEL cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForLogLevelWithUPTOLOGLEVEL cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java
index 5121846..7d67b61 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java
@@ -26,7 +26,7 @@ import com.gemstone.gemfire.internal.logging.LogWriterImpl;
import com.gemstone.gemfire.management.cli.Result;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -104,10 +104,10 @@ public class MiscellaneousCommandsExportLogsPart3DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForGroup" + dir, groups, null, logLevel, false,
false, start, end, 1);
- LogWriterSupport.getLogWriter().info("testExportLogsForGroup command result =" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForGroup command result =" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogsForGroup cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForGroup cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testExportLogsForGroup failed as did not get CommandResult");
@@ -137,11 +137,11 @@ public class MiscellaneousCommandsExportLogsPart3DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForMember" + dir, null, vm1MemberId, logLevel,
false, false, start, end, 1);
- LogWriterSupport.getLogWriter().info("testExportLogsForMember command result =" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForMember command result =" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogsForMember cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForMember cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testExportLogsForMember failed as did not get CommandResult");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java
index 1cb7e3a..2d11580 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java
@@ -25,7 +25,7 @@ import com.gemstone.gemfire.internal.logging.LogWriterImpl;
import com.gemstone.gemfire.management.cli.Result;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -95,11 +95,11 @@ public class MiscellaneousCommandsExportLogsPart4DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForTimeRange1" + dir, null, null, logLevel, false,
false, start, end, 1);
- LogWriterSupport.getLogWriter().info("testExportLogsForTimeRange1 command result =" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForTimeRange1 command result =" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogsForTimeRange1 cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForTimeRange1 cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testExportLogsForTimeRange1 failed as did not get CommandResult");
@@ -123,11 +123,11 @@ public class MiscellaneousCommandsExportLogsPart4DUnitTest extends CliCommandTes
Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForTimeRangeForOnlyStartTime" + dir, null, null,
logLevel, false, false, s, null, 1);
- LogWriterSupport.getLogWriter().info("testExportLogsForTimeRangeForOnlyStartTime command result =" + cmdResult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForTimeRangeForOnlyStartTime command result =" + cmdResult);
if (cmdResult != null) {
String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
- LogWriterSupport.getLogWriter().info("testExportLogsForTimeRangeForOnlyStartTime cmdStringRsult=" + cmdStringRsult);
+ LogWriterUtils.getLogWriter().info("testExportLogsForTimeRangeForOnlyStartTime cmdStringRsult=" + cmdStringRsult);
assertEquals(Result.Status.OK, cmdResult.getStatus());
} else {
fail("testExportLogsForTimeRangeForOnlyStartTime failed as did not get CommandResult");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/QueueCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/QueueCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/QueueCommandsDUnitTest.java
index 8da6e2b..e6c1e47 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/QueueCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/QueueCommandsDUnitTest.java
@@ -31,7 +31,7 @@ import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -378,7 +378,7 @@ public class QueueCommandsDUnitTest extends CliCommandTestBase {
executeCommand("undeploy --jar=" + fileToDelete.getName());
}
} catch (IOException e) {
- LogWriterSupport.getLogWriter().error("Unable to delete file", e);
+ LogWriterUtils.getLogWriter().error("Unable to delete file", e);
}
}
this.filesToBeDeleted.clear();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/SharedConfigurationCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/SharedConfigurationCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/SharedConfigurationCommandsDUnitTest.java
index d1c9efd..9dc9506 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/SharedConfigurationCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/SharedConfigurationCommandsDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.management.internal.configuration.SharedConfiguratio
import com.gemstone.gemfire.management.internal.configuration.domain.Configuration;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -214,23 +214,23 @@ public class SharedConfigurationCommandsDUnitTest extends CliCommandTestBase {
cmdResult = executeCommand(commandStringBuilder.getCommandString());
String resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB Result\n");
- LogWriterSupport.getLogWriter().info(resultString);
+ LogWriterUtils.getLogWriter().info("#SB Result\n");
+ LogWriterUtils.getLogWriter().info(resultString);
assertEquals(true, cmdResult.getStatus().equals(Status.OK));
commandStringBuilder = new CommandStringBuilder(CliStrings.STATUS_SHARED_CONFIG);
cmdResult = executeCommand(commandStringBuilder.getCommandString());
resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB Result\n");
- LogWriterSupport.getLogWriter().info(resultString);
+ LogWriterUtils.getLogWriter().info("#SB Result\n");
+ LogWriterUtils.getLogWriter().info(resultString);
assertEquals(Status.OK, cmdResult.getStatus());
commandStringBuilder = new CommandStringBuilder(CliStrings.EXPORT_SHARED_CONFIG);
commandStringBuilder.addOption(CliStrings.EXPORT_SHARED_CONFIG__FILE, sharedConfigZipFileName);
cmdResult = executeCommand(commandStringBuilder.getCommandString());
resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("#SB Result\n");
- LogWriterSupport.getLogWriter().info(resultString);
+ LogWriterUtils.getLogWriter().info("#SB Result\n");
+ LogWriterUtils.getLogWriter().info(resultString);
assertEquals(Status.OK, cmdResult.getStatus());
//Import into a running system should fail
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShellCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShellCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShellCommandsDUnitTest.java
index 1364f0e..2ff86ba 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShellCommandsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShellCommandsDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.shell.Gfsh;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import org.junit.Before;
@@ -106,7 +106,7 @@ public class ShellCommandsDUnitTest extends CliCommandTestBase {
if (gfshInstance == null) {
fail("In testEcho command gfshInstance is null");
}
- LogWriterSupport.getLogWriter().info("Gsh " + gfshInstance);
+ LogWriterUtils.getLogWriter().info("Gsh " + gfshInstance);
gfshInstance.setEnvProperty("TESTSYS", "SYS_VALUE");
printAllEnvs(gfshInstance);
@@ -336,9 +336,9 @@ public class ShellCommandsDUnitTest extends CliCommandTestBase {
if (cmdResult != null) {
assertEquals(Result.Status.OK, cmdResult.getStatus());
- LogWriterSupport.getLogWriter().info("testClearHistory cmdResult=" + commandResultToString(cmdResult));
+ LogWriterUtils.getLogWriter().info("testClearHistory cmdResult=" + commandResultToString(cmdResult));
String resultString = commandResultToString(cmdResult);
- LogWriterSupport.getLogWriter().info("testClearHistory resultString=" + resultString);
+ LogWriterUtils.getLogWriter().info("testClearHistory resultString=" + resultString);
assertTrue(resultString.contains(CliStrings.HISTORY__MSG__CLEARED_HISTORY));
assertTrue(gfshInstance.getGfshHistory().size()<= 1);
} else {
@@ -348,18 +348,18 @@ public class ShellCommandsDUnitTest extends CliCommandTestBase {
private static void printCommandOutput(CommandResult cmdResult) {
assertNotNull(cmdResult);
- LogWriterSupport.getLogWriter().info("Command Output : ");
+ LogWriterUtils.getLogWriter().info("Command Output : ");
StringBuilder sb = new StringBuilder();
cmdResult.resetToFirstLine();
while (cmdResult.hasNextLine()) {
sb.append(cmdResult.nextLine()).append(DataCommandRequest.NEW_LINE);
}
- LogWriterSupport.getLogWriter().info(sb.toString());
- LogWriterSupport.getLogWriter().info("");
+ LogWriterUtils.getLogWriter().info(sb.toString());
+ LogWriterUtils.getLogWriter().info("");
}
private void printAllEnvs(Gfsh gfsh) {
- LogWriterSupport.getLogWriter().info("printAllEnvs : " + StringUtils.objectToString(gfsh.getEnv(), false, 0));
+ LogWriterUtils.getLogWriter().info("printAllEnvs : " + StringUtils.objectToString(gfsh.getEnv(), false, 0));
/*
getLogWriter().info("Gfsh printAllEnvs : " + HydraUtil.ObjectToString(getDefaultShell().getEnv()));
getLogWriter().info("gfsh " + gfsh + " default shell " + getDefaultShell());*/
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowDeadlockDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowDeadlockDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowDeadlockDUnitTest.java
index a70405b..ba942c6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowDeadlockDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowDeadlockDUnitTest.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -113,7 +113,7 @@ public class ShowDeadlockDUnitTest extends CacheTestCase {
String deadLockOutputFromCommand = getResultAsString(result);
- LogWriterSupport.getLogWriter().info("output = " + deadLockOutputFromCommand);
+ LogWriterUtils.getLogWriter().info("output = " + deadLockOutputFromCommand);
assertEquals(true, result.hasIncomingFiles());
assertEquals(true, result.getStatus().equals(Status.OK));
assertEquals(true, deadLockOutputFromCommand.startsWith(CliStrings.SHOW_DEADLOCK__NO__DEADLOCK));
@@ -149,7 +149,7 @@ public class ShowDeadlockDUnitTest extends CacheTestCase {
Result result = commandProcessor.createCommandStatement(csb.toString(), EMPTY_ENV).process();
String deadLockOutputFromCommand = getResultAsString(result);
- LogWriterSupport.getLogWriter().info("Deadlock = " + deadLockOutputFromCommand);
+ LogWriterUtils.getLogWriter().info("Deadlock = " + deadLockOutputFromCommand);
result.saveIncomingFiles(null);
assertEquals(true, deadLockOutputFromCommand.startsWith(CliStrings.SHOW_DEADLOCK__DEADLOCK__DETECTED));
assertEquals(true, result.getStatus().equals(Status.OK));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowMetricsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowMetricsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowMetricsDUnitTest.java
index 51de395..a34b185 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowMetricsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowMetricsDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.management.internal.cli.remote.CommandProcessor;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -101,7 +101,7 @@ public class ShowMetricsDUnitTest extends CliCommandTestBase {
CommandProcessor commandProcessor = new CommandProcessor();
Result result = commandProcessor.createCommandStatement("show metrics", Collections.EMPTY_MAP).process();
String resultStr = commandResultToString((CommandResult) result);
- LogWriterSupport.getLogWriter().info(resultStr);
+ LogWriterUtils.getLogWriter().info(resultStr);
assertEquals(resultStr, true, result.getStatus().equals(Status.OK));
return resultStr;
}
@@ -113,8 +113,8 @@ public class ShowMetricsDUnitTest extends CliCommandTestBase {
String managerResult = (String) managerResultObj;
- LogWriterSupport.getLogWriter().info("#SB Manager");
- LogWriterSupport.getLogWriter().info(managerResult);
+ LogWriterUtils.getLogWriter().info("#SB Manager");
+ LogWriterUtils.getLogWriter().info(managerResult);
}
public void systemSetUp() {
@@ -160,8 +160,8 @@ public class ShowMetricsDUnitTest extends CliCommandTestBase {
String managerResult = (String) managerResultObj;
- LogWriterSupport.getLogWriter().info("#SB Manager");
- LogWriterSupport.getLogWriter().info(managerResult);
+ LogWriterUtils.getLogWriter().info("#SB Manager");
+ LogWriterUtils.getLogWriter().info(managerResult);
}
/***
@@ -258,8 +258,8 @@ public class ShowMetricsDUnitTest extends CliCommandTestBase {
String managerResult = (String) managerResultObj;
- LogWriterSupport.getLogWriter().info("#SB Manager");
- LogWriterSupport.getLogWriter().info(managerResult);
+ LogWriterUtils.getLogWriter().info("#SB Manager");
+ LogWriterUtils.getLogWriter().info(managerResult);
cs.stop();
}
@@ -299,8 +299,8 @@ public class ShowMetricsDUnitTest extends CliCommandTestBase {
String managerResult = (String) managerResultObj;
- LogWriterSupport.getLogWriter().info("#SB Manager");
- LogWriterSupport.getLogWriter().info(managerResult);
+ LogWriterUtils.getLogWriter().info("#SB Manager");
+ LogWriterUtils.getLogWriter().info(managerResult);
}
public void testShowMetricsRegionFromMemberWithCategories() throws ClassNotFoundException, IOException, InterruptedException {
@@ -339,7 +339,7 @@ public class ShowMetricsDUnitTest extends CliCommandTestBase {
String managerResult = (String) managerResultObj;
- LogWriterSupport.getLogWriter().info("#SB Manager");
- LogWriterSupport.getLogWriter().info(managerResult);
+ LogWriterUtils.getLogWriter().info("#SB Manager");
+ LogWriterUtils.getLogWriter().info(managerResult);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowStackTraceDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowStackTraceDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowStackTraceDUnitTest.java
index 4c004fa..11bd352 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowStackTraceDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ShowStackTraceDUnitTest.java
@@ -22,7 +22,7 @@ import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -94,9 +94,9 @@ public class ShowStackTraceDUnitTest extends CliCommandTestBase {
CommandStringBuilder csb = new CommandStringBuilder(CliStrings.EXPORT_STACKTRACE);
csb.addOption(CliStrings.EXPORT_STACKTRACE__FILE, allStacktracesFile.getCanonicalPath());
String commandString = csb.toString();
- LogWriterSupport.getLogWriter().info("CommandString : " + commandString);
+ LogWriterUtils.getLogWriter().info("CommandString : " + commandString);
CommandResult commandResult = executeCommand(commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
assertTrue(commandResult.getStatus().equals(Status.OK));
File mgrStacktraceFile = new File("managerStacktrace.txt");
@@ -106,9 +106,9 @@ public class ShowStackTraceDUnitTest extends CliCommandTestBase {
csb.addOption(CliStrings.EXPORT_STACKTRACE__FILE, mgrStacktraceFile.getCanonicalPath());
csb.addOption(CliStrings.EXPORT_STACKTRACE__MEMBER, "Manager");
commandString = csb.toString();
- LogWriterSupport.getLogWriter().info("CommandString : " + commandString);
+ LogWriterUtils.getLogWriter().info("CommandString : " + commandString);
commandResult = executeCommand(commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
assertTrue(commandResult.getStatus().equals(Status.OK));
File serverStacktraceFile = new File("serverStacktrace.txt");
@@ -118,9 +118,9 @@ public class ShowStackTraceDUnitTest extends CliCommandTestBase {
csb.addOption(CliStrings.EXPORT_STACKTRACE__FILE, serverStacktraceFile.getCanonicalPath());
csb.addOption(CliStrings.EXPORT_STACKTRACE__MEMBER, "Server");
commandString = csb.toString();
- LogWriterSupport.getLogWriter().info("CommandString : " + commandString);
+ LogWriterUtils.getLogWriter().info("CommandString : " + commandString);
commandResult = executeCommand(commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
assertTrue(commandResult.getStatus().equals(Status.OK));
File groupStacktraceFile = new File("groupstacktrace.txt");
@@ -130,9 +130,9 @@ public class ShowStackTraceDUnitTest extends CliCommandTestBase {
csb.addOption(CliStrings.EXPORT_STACKTRACE__FILE, groupStacktraceFile.getCanonicalPath());
csb.addOption(CliStrings.EXPORT_STACKTRACE__GROUP, "G2");
commandString = csb.toString();
- LogWriterSupport.getLogWriter().info("CommandString : " + commandString);
+ LogWriterUtils.getLogWriter().info("CommandString : " + commandString);
commandResult = executeCommand(commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
assertTrue(commandResult.getStatus().equals(Status.OK));
File wrongStackTraceFile = new File("wrongStackTrace.txt");
@@ -142,9 +142,9 @@ public class ShowStackTraceDUnitTest extends CliCommandTestBase {
csb.addOption(CliStrings.EXPORT_STACKTRACE__FILE, wrongStackTraceFile.getCanonicalPath());
csb.addOption(CliStrings.EXPORT_STACKTRACE__MEMBER, "WrongMember");
commandString = csb.toString();
- LogWriterSupport.getLogWriter().info("CommandString : " + commandString);
+ LogWriterUtils.getLogWriter().info("CommandString : " + commandString);
commandResult = executeCommand(commandString);
- LogWriterSupport.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
+ LogWriterUtils.getLogWriter().info("Output : \n" + commandResultToString(commandResult));
assertFalse(commandResult.getStatus().equals(Status.OK));
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientIdsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientIdsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientIdsDUnitTest.java
index e33e2e0..5cbc40b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientIdsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestClientIdsDUnitTest.java
@@ -40,8 +40,8 @@ import com.gemstone.gemfire.management.internal.cli.CliUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -110,8 +110,8 @@ public class TestClientIdsDUnitTest extends DistributedTestCase {
helper.startManagingNode(managingNode);
int port = (Integer) createServerCache(server);
DistributedMember serverMember = helper.getMember(server);
- createClientCache(client, NetworkSupport.getServerHostName(server.getHost()), port);
- createClientCache(client2, NetworkSupport.getServerHostName(server.getHost()), port);
+ createClientCache(client, NetworkUtils.getServerHostName(server.getHost()), port);
+ createClientCache(client2, NetworkUtils.getServerHostName(server.getHost()), port);
put(client);
put(client2);
verifyClientIds(managingNode, serverMember, port);
@@ -233,7 +233,7 @@ public class TestClientIdsDUnitTest extends DistributedTestCase {
}
}
}catch (Exception e) {
- LogWriterSupport.getLogWriter().info("exception occured " + e.getMessage() + CliUtil.stackTraceAsString((Throwable)e));
+ LogWriterUtils.getLogWriter().info("exception occured " + e.getMessage() + CliUtil.stackTraceAsString((Throwable)e));
}
return false;
}
@@ -247,7 +247,7 @@ public class TestClientIdsDUnitTest extends DistributedTestCase {
//Now it is sure that bean would be available
CacheServerMXBean bean = MBeanUtil.getCacheServerMbeanProxy(
serverMember, serverPort);
- LogWriterSupport.getLogWriter().info("verifyClientIds = " + bean.getClientIds().length);
+ LogWriterUtils.getLogWriter().info("verifyClientIds = " + bean.getClientIds().length);
assertEquals(true, bean.getClientIds().length > 0 ? true : false);
} catch (Exception e) {
fail("Error while verifying cache server from remote member " + e);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestFunctionsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestFunctionsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestFunctionsDUnitTest.java
index 69b27c0..3bb7162 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestFunctionsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestFunctionsDUnitTest.java
@@ -24,7 +24,7 @@ import com.gemstone.gemfire.internal.cache.functions.TestFunction;
import com.gemstone.gemfire.management.DistributedSystemMXBean;
import com.gemstone.gemfire.management.ManagementService;
import com.gemstone.gemfire.management.ManagementTestBase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -101,7 +101,7 @@ public class TestFunctionsDUnitTest extends ManagementTestBase {
});
Integer numOfRunningFunctions = (Integer) managingNode.invoke(
TestFunctionsDUnitTest.class, "getNumOfRunningFunction");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"TestNumOfFunctions numOfRunningFunctions= " + numOfRunningFunctions);
assertTrue(numOfRunningFunctions > 0 ? true : false);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestHeapDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestHeapDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestHeapDUnitTest.java
index 00b0bb9..fd161fd 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestHeapDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestHeapDUnitTest.java
@@ -19,7 +19,7 @@ package com.gemstone.gemfire.management.internal.pulse;
import com.gemstone.gemfire.management.DistributedSystemMXBean;
import com.gemstone.gemfire.management.ManagementService;
import com.gemstone.gemfire.management.ManagementTestBase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -89,7 +89,7 @@ public class TestHeapDUnitTest extends ManagementTestBase {
long totalHeapSizeFromMXBean = ((Number) managingNode.invoke(
TestHeapDUnitTest.class, "getHeapSizeOfDS")).intValue();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testTotalHeapSize totalHeapSizeFromMXBean = "
+ totalHeapSizeFromMXBean + " totalHeapSizeOnAll = "
+ totalHeapSizeOnAll);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestLocatorsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestLocatorsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestLocatorsDUnitTest.java
index 5869d9b..24494ff 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestLocatorsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestLocatorsDUnitTest.java
@@ -19,7 +19,7 @@ package com.gemstone.gemfire.management.internal.pulse;
import com.gemstone.gemfire.management.DistributedSystemMXBean;
import com.gemstone.gemfire.management.ManagementService;
import com.gemstone.gemfire.management.ManagementTestBase;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -76,7 +76,7 @@ public class TestLocatorsDUnitTest extends ManagementTestBase {
initManagement(false);
int locatorCount = ((Number) managingNode.invoke(
TestLocatorsDUnitTest.class, "getNumOfLocatorFromMBean")).intValue();
- LogWriterSupport.getLogWriter().info("TestLocatorsDUnitTest locatorCount =" + locatorCount);
+ LogWriterUtils.getLogWriter().info("TestLocatorsDUnitTest locatorCount =" + locatorCount);
assertEquals(1, locatorCount);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestSubscriptionsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestSubscriptionsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestSubscriptionsDUnitTest.java
index a7f6575..4043ad4 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestSubscriptionsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/pulse/TestSubscriptionsDUnitTest.java
@@ -40,8 +40,8 @@ import com.gemstone.gemfire.management.ManagementTestBase;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -102,8 +102,8 @@ public class TestSubscriptionsDUnitTest extends DistributedTestCase {
int port = (Integer) createServerCache(server);
DistributedMember serverMember = helper.getMember(server);
- createClientCache(client, NetworkSupport.getServerHostName(server.getHost()), port);
- createClientCache(client2, NetworkSupport.getServerHostName(server.getHost()), port);
+ createClientCache(client, NetworkUtils.getServerHostName(server.getHost()), port);
+ createClientCache(client2, NetworkUtils.getServerHostName(server.getHost()), port);
put(client);
put(client2);
registerInterest(client);
@@ -244,7 +244,7 @@ public class TestSubscriptionsDUnitTest extends DistributedTestCase {
final DistributedSystemMXBean dsBean = ManagementService
.getExistingManagementService(cache).getDistributedSystemMXBean();
assertNotNull(dsBean);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"TestSubscriptionsDUnitTest dsBean.getNumSubscriptions() ="
+ dsBean.getNumSubscriptions());
assertTrue(dsBean.getNumSubscriptions() == 2 ? true : false);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/ClientsWithVersioningRetryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/ClientsWithVersioningRetryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/ClientsWithVersioningRetryDUnitTest.java
index 62cd89d..40592ff 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/ClientsWithVersioningRetryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/ClientsWithVersioningRetryDUnitTest.java
@@ -58,8 +58,8 @@ import com.gemstone.gemfire.internal.cache.versions.VMVersionTag;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -282,7 +282,7 @@ public class ClientsWithVersioningRetryDUnitTest extends CacheTestCase {
final VM vm2 = host.getVM(2);
final VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info("creating region in vm0");
+ LogWriterUtils.getLogWriter().info("creating region in vm0");
createRegionInPeer(vm0, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
vm0.invoke(new SerializableRunnable() {
@@ -294,11 +294,11 @@ public class ClientsWithVersioningRetryDUnitTest extends CacheTestCase {
}
});
- LogWriterSupport.getLogWriter().info("creating region in vm1");
+ LogWriterUtils.getLogWriter().info("creating region in vm1");
createRegionInPeer(vm1, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
- LogWriterSupport.getLogWriter().info("creating region in vm2");
+ LogWriterUtils.getLogWriter().info("creating region in vm2");
createRegionInPeer(vm2, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
- LogWriterSupport.getLogWriter().info("creating region in vm3");
+ LogWriterUtils.getLogWriter().info("creating region in vm3");
createRegionInPeer(vm3, RegionShortcut.PARTITION_PROXY);
expectedExceptions.add(IgnoredException.addIgnoredException("RuntimeException", vm2));
@@ -497,8 +497,8 @@ public class ClientsWithVersioningRetryDUnitTest extends CacheTestCase {
SerializableCallable createRegion = new SerializableCallable("create client region in " + vm) {
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port1);
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port2);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port2);
cf.setPoolPRSingleHopEnabled(false);
cf.setPoolThreadLocalConnections(threadLocalConnections);
cf.setPoolReadTimeout(10 * 60 * 1000);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/JSONPdxClientServerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/JSONPdxClientServerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/JSONPdxClientServerDUnitTest.java
index 024496d..937ff31 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/JSONPdxClientServerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/JSONPdxClientServerDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.pdx.internal.json.PdxToJSON;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.util.test.TestUtil;
@@ -595,7 +595,7 @@ public class JSONPdxClientServerDUnitTest extends CacheTestCase {
SerializableCallable createRegion = new SerializableCallable() {
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port);
cf.setPoolThreadLocalConnections(threadLocalConnections);
ClientCache cache = getClientCache(cf);
cache.createClientRegionFactory(ClientRegionShortcut.PROXY)
@@ -611,7 +611,7 @@ public class JSONPdxClientServerDUnitTest extends CacheTestCase {
SerializableCallable createRegion = new SerializableCallable() {
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port);
cf.setPoolThreadLocalConnections(threadLocalConnections);
cf.setPdxReadSerialized(isPdxReadSerialized);
ClientCache cache = getClientCache(cf);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxClientServerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxClientServerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxClientServerDUnitTest.java
index 8506459..7720890 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxClientServerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxClientServerDUnitTest.java
@@ -43,7 +43,7 @@ import com.gemstone.gemfire.internal.Version;
import com.gemstone.gemfire.pdx.internal.AutoSerializableManager;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -394,11 +394,11 @@ public class PdxClientServerDUnitTest extends CacheTestCase {
getSystem(props);
Cache cache = getCache();
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(vm0.getHost()), port1);
+ pf.addServer(NetworkUtils.getServerHostName(vm0.getHost()), port1);
pf.create("pool1");
pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(vm0.getHost()), port2);
+ pf.addServer(NetworkUtils.getServerHostName(vm0.getHost()), port2);
pf.create("pool2");
AttributesFactory af = new AttributesFactory();
@@ -499,7 +499,7 @@ public class PdxClientServerDUnitTest extends CacheTestCase {
getSystem(props);
Cache cache = getCache();
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(vm0.getHost()), port);
+ pf.addServer(NetworkUtils.getServerHostName(vm0.getHost()), port);
pf.create("pool");
AttributesFactory af = new AttributesFactory();
@@ -553,7 +553,7 @@ public class PdxClientServerDUnitTest extends CacheTestCase {
DataSerializer.writeObject(new SimpleClass(57, (byte) 3), out);
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(vm0.getHost()), port);
+ pf.addServer(NetworkUtils.getServerHostName(vm0.getHost()), port);
try {
pf.create("pool");
fail("should have received an exception");
@@ -704,7 +704,7 @@ public class PdxClientServerDUnitTest extends CacheTestCase {
System.setProperty("gemfire.ON_DISCONNECT_CLEAR_PDXTYPEIDS", "true");
}
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port);
cf.setPoolThreadLocalConnections(threadLocalConnections);
if(autoSerializerPatterns != null && autoSerializerPatterns.length != 0) {
cf.setPdxSerializer(new ReflectionBasedAutoSerializer(autoSerializerPatterns));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxTypeExportDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxTypeExportDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxTypeExportDUnitTest.java
index 2ea86d2..c820e72 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxTypeExportDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/PdxTypeExportDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.pdx.internal.EnumInfo;
import com.gemstone.gemfire.pdx.internal.PdxType;
import com.gemstone.gemfire.pdx.internal.TypeRegistry;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
public class PdxTypeExportDUnitTest extends CacheTestCase {
@@ -116,7 +116,7 @@ public class PdxTypeExportDUnitTest extends CacheTestCase {
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory()
.setPdxSerializer(new MyPdxSerializer())
- .addPoolServer(NetworkSupport.getServerHostName(host), port);
+ .addPoolServer(NetworkUtils.getServerHostName(host), port);
ClientCache cache = getClientCache(cf);
Region r = cache.createClientRegionFactory(ClientRegionShortcut.PROXY).create("pdxtest");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/VersionClassLoader.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/VersionClassLoader.java b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/VersionClassLoader.java
index 5462b5b..f2e6b5a 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/VersionClassLoader.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/pdx/VersionClassLoader.java
@@ -25,7 +25,7 @@ import java.lang.reflect.Constructor;
import java.net.URL;
import java.net.URLClassLoader;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class VersionClassLoader {
@@ -42,16 +42,16 @@ public class VersionClassLoader {
String alternateVersionClassPath = System.getProperty("JTESTS") +
File.separator + ".." + File.separator + ".." + File.separator +
"classes" + File.separator + "version" + classVersion;
- LogWriterSupport.getLogWriter().info("Initializing the class loader :" + alternateVersionClassPath);
+ LogWriterUtils.getLogWriter().info("Initializing the class loader :" + alternateVersionClassPath);
ClassLoader versionCL = null;
try {
versionCL = new URLClassLoader(new URL[]{new File(alternateVersionClassPath).toURI().toURL()}, cl);
Thread.currentThread().setContextClassLoader(versionCL);
} catch (Exception e) {
- LogWriterSupport.getLogWriter().info("error", e);
+ LogWriterUtils.getLogWriter().info("error", e);
throw new Exception("Failed to initialize the class loader. " + e.getMessage());
}
- LogWriterSupport.getLogWriter().info("Setting/adding class loader with " + alternateVersionClassPath);
+ LogWriterUtils.getLogWriter().info("Setting/adding class loader with " + alternateVersionClassPath);
return versionCL;
}
@@ -90,7 +90,7 @@ public class VersionClassLoader {
newObj = constructor.newInstance();
}
} catch (Exception e) {
- LogWriterSupport.getLogWriter().info("error", e);
+ LogWriterUtils.getLogWriter().info("error", e);
throw new Exception("Failed to get the class instance. ClassName: " + className + " error: ", e);
}
return newObj;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/redis/RedisDistDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/redis/RedisDistDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/redis/RedisDistDUnitTest.java
index 4eb43a4..476702c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/redis/RedisDistDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/redis/RedisDistDUnitTest.java
@@ -25,10 +25,10 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.SocketCreator;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -73,7 +73,7 @@ public class RedisDistDUnitTest extends DistributedTestCase {
client1 = host.getVM(2);
client2 = host.getVM(3);
final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(2);
- final int locatorPort = DistributedTestSupport.getDUnitLocatorPort();
+ final int locatorPort = DistributedTestUtils.getDUnitLocatorPort();
final SerializableCallable<Object> startRedisAdapter = new SerializableCallable<Object>() {
private static final long serialVersionUID = 1978017907725504294L;
@@ -83,7 +83,7 @@ public class RedisDistDUnitTest extends DistributedTestCase {
int port = ports[VM.getCurrentVMNum()];
CacheFactory cF = new CacheFactory();
String locator = SocketCreator.getLocalHost().getHostName() + "[" + locatorPort + "]";
- cF.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ cF.set("log-level", LogWriterUtils.getDUnitLogLevel());
cF.set("redis-bind-address", localHost);
cF.set("redis-port", ""+port);
cF.set("mcast-port", "0");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthenticationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthenticationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthenticationDUnitTest.java
index ceb01ab..6f57fb6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthenticationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/security/ClientAuthenticationDUnitTest.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -215,11 +215,11 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
String authenticator = gen.getAuthenticator();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testValidCredentials: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info("testValidCredentials: Using authinit: " + authInit);
// Start the servers
Integer locPort1 = SecurityTestUtil.getLocatorPort();
@@ -237,12 +237,12 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Start the clients with valid credentials
Properties credentials1 = gen.getValidCredentials(1);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: For first client credentials: " + credentials1
+ " : " + javaProps1);
Properties credentials2 = gen.getValidCredentials(2);
Properties javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testValidCredentials: For second client credentials: "
+ credentials2 + " : " + javaProps2);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -281,11 +281,11 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
String authenticator = gen.getAuthenticator();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("testNoCredentials: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoCredentials: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info("testNoCredentials: Using authinit: " + authInit);
+ LogWriterUtils.getLogWriter().info("testNoCredentials: Using authinit: " + authInit);
// Start the servers
Integer locPort1 = SecurityTestUtil.getLocatorPort();
@@ -303,7 +303,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Start first client with valid credentials
Properties credentials1 = gen.getValidCredentials(1);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoCredentials: For first client credentials: " + credentials1
+ " : " + javaProps1);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -346,11 +346,11 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
String authenticator = gen.getAuthenticator();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidCredentials: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidCredentials: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("testInvalidCredentials: Using authinit: " + authInit);
// Start the servers
@@ -369,7 +369,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Start first client with valid credentials
Properties credentials1 = gen.getValidCredentials(1);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidCredentials: For first client credentials: "
+ credentials1 + " : " + javaProps1);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -385,7 +385,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// exception
Properties credentials2 = gen.getInvalidCredentials(1);
Properties javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidCredentials: For second client credentials: "
+ credentials2 + " : " + javaProps2);
client2.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -404,9 +404,9 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
Properties javaProps = gen.getJavaProperties();
String authenticator = gen.getAuthenticator();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAuthInit: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAuthInit: Using authenticator: " + authenticator);
// Start the server
@@ -419,7 +419,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
Properties credentials = gen.getValidCredentials(1);
javaProps = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAuthInit: For first client credentials: " + credentials
+ " : " + javaProps);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -440,9 +440,9 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
String authenticator = gen.getAuthenticator();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthInitWithCredentials: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthInitWithCredentials: Using authenticator: "
+ authenticator);
@@ -462,12 +462,12 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Start the clients with valid credentials
Properties credentials1 = gen.getValidCredentials(1);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthInitWithCredentials: For first client credentials: "
+ credentials1 + " : " + javaProps1);
Properties credentials2 = gen.getValidCredentials(2);
Properties javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthInitWithCredentials: For second client credentials: "
+ credentials2 + " : " + javaProps2);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -498,9 +498,9 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
Properties javaProps = gen.getJavaProperties();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAuthenticator: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAuthenticator: Using authinit: " + authInit);
// Start the server with invalid authenticator
@@ -514,7 +514,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Trying to create the region on client should throw a security exception
Properties credentials = gen.getValidCredentials(1);
javaProps = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAuthenticator: For first client credentials: "
+ credentials + " : " + javaProps);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -526,7 +526,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Also test with invalid credentials
credentials = gen.getInvalidCredentials(1);
javaProps = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testInvalidAuthenticator: For first client credentials: "
+ credentials + " : " + javaProps);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -546,10 +546,10 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
String authenticator = gen.getAuthenticator();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthenticatorWithCredentials: Using scheme: "
+ gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthenticatorWithCredentials: Using authinit: " + authInit);
// Start the servers with no authenticator
@@ -567,12 +567,12 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// valid/invalid credentials when none are required on the server side
Properties credentials1 = gen.getValidCredentials(3);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthenticatorWithCredentials: For first client credentials: "
+ credentials1 + " : " + javaProps1);
Properties credentials2 = gen.getInvalidCredentials(5);
Properties javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testNoAuthenticatorWithCredentials: For second client credentials: "
+ credentials2 + " : " + javaProps2);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -602,11 +602,11 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
String authenticator = gen.getAuthenticator();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsWithFailover: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsWithFailover: Using authenticator: " + authenticator);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsWithFailover: Using authinit: " + authInit);
// Start the first server
@@ -625,12 +625,12 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Start the clients with valid credentials
Properties credentials1 = gen.getValidCredentials(5);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsWithFailover: For first client credentials: "
+ credentials1 + " : " + javaProps1);
Properties credentials2 = gen.getValidCredentials(6);
Properties javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsWithFailover: For second client credentials: "
+ credentials2 + " : " + javaProps2);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -683,7 +683,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Verify that the creation of region throws security exception
credentials1 = gen.getInvalidCredentials(7);
javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsWithFailover: For first client invalid credentials: "
+ credentials1 + " : " + javaProps1);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -711,12 +711,12 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
String authenticator = gen.getAuthenticator();
String authInit = gen.getAuthInit();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: Using scheme: " + gen.classCode());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: Using authenticator: "
+ authenticator);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: Using authinit: " + authInit);
// Start the first server
@@ -735,12 +735,12 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Start the clients with valid credentials
Properties credentials1 = gen.getValidCredentials(3);
Properties javaProps1 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: For first client credentials: "
+ credentials1 + " : " + javaProps1);
Properties credentials2 = gen.getValidCredentials(4);
Properties javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: For second client credentials: "
+ credentials2 + " : " + javaProps2);
client1.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -806,7 +806,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
// Verify that the creation of region throws security exception
credentials2 = gen.getInvalidCredentials(3);
javaProps2 = gen.getJavaProperties();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: For second client invalid credentials: "
+ credentials2 + " : " + javaProps2);
client2.invoke(ClientAuthenticationDUnitTest.class, "createCacheClient",
@@ -846,7 +846,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
new Integer(SecurityTestUtil.AUTHFAIL_EXCEPTION) });
}
else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: Skipping invalid authenticator for scheme ["
+ gen.classCode() + "] which has no authInit");
}
@@ -877,7 +877,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
new Integer(SecurityTestUtil.AUTHREQ_EXCEPTION) });
}
else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: Skipping null authInit for scheme ["
+ gen.classCode() + "] which has no authInit");
}
@@ -937,7 +937,7 @@ public class ClientAuthenticationDUnitTest extends DistributedTestCase {
new Object[] { new Integer(4) });
}
else {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"testCredentialsForNotifications: Skipping scheme ["
+ gen.classCode() + "] which has no authenticator");
}
[26/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientRegisterInterestDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientRegisterInterestDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientRegisterInterestDUnitTest.java
index da155f1..a734a10 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientRegisterInterestDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientRegisterInterestDUnitTest.java
@@ -26,8 +26,8 @@ import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -63,7 +63,7 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
final VM vm = Host.getHost(0).getVM(whichVM);
vm.invoke(new CacheSerializableRunnable("Create bridge server") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("[testBug35381] Create BridgeServer");
+ LogWriterUtils.getLogWriter().info("[testBug35381] Create BridgeServer");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -76,21 +76,21 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
bridgeServerPort = startBridgeServer(0);
}
catch (IOException e) {
- LogWriterSupport.getLogWriter().error("startBridgeServer threw IOException", e);
+ LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
fail("startBridgeServer threw IOException " + e.getMessage());
}
assertTrue(bridgeServerPort != 0);
- LogWriterSupport.getLogWriter().info("[testBug35381] port=" + bridgeServerPort);
- LogWriterSupport.getLogWriter().info("[testBug35381] serverMemberId=" + getMemberId());
+ LogWriterUtils.getLogWriter().info("[testBug35381] port=" + bridgeServerPort);
+ LogWriterUtils.getLogWriter().info("[testBug35381] serverMemberId=" + getMemberId());
}
});
ports[whichVM] = vm.invokeInt(ClientRegisterInterestDUnitTest.class,
"getBridgeServerPort");
assertTrue(ports[whichVM] != 0);
- LogWriterSupport.getLogWriter().info("[testBug35381] create bridge client");
+ LogWriterUtils.getLogWriter().info("[testBug35381] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
@@ -100,9 +100,9 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- LogWriterSupport.getLogWriter().info("[testBug35381] creating connection pool");
+ LogWriterUtils.getLogWriter().info("[testBug35381] creating connection pool");
boolean establishCallbackConnection = false; // SOURCE OF BUG 35381
- ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, establishCallbackConnection, -1, -1, null);
+ ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, establishCallbackConnection, -1, -1, null);
Region region = createRegion(name, factory.create());
assertNotNull(getRootRegion().getSubregion(name));
try {
@@ -148,7 +148,7 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
final VM firstServerVM = Host.getHost(0).getVM(firstServerIdx);
firstServerVM.invoke(new CacheSerializableRunnable("Create first bridge server") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("[testRegisterInterestFailover] Create first bridge server");
+ LogWriterUtils.getLogWriter().info("[testRegisterInterestFailover] Create first bridge server");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -163,15 +163,15 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
bridgeServerPort = startBridgeServer(0);
}
catch (IOException e) {
- LogWriterSupport.getLogWriter().error("startBridgeServer threw IOException", e);
+ LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
fail("startBridgeServer threw IOException " + e.getMessage());
}
assertTrue(bridgeServerPort != 0);
- LogWriterSupport.getLogWriter().info("[testRegisterInterestFailover] " +
+ LogWriterUtils.getLogWriter().info("[testRegisterInterestFailover] " +
"firstServer port=" + bridgeServerPort);
- LogWriterSupport.getLogWriter().info("[testRegisterInterestFailover] " +
+ LogWriterUtils.getLogWriter().info("[testRegisterInterestFailover] " +
"firstServer memberId=" + getMemberId());
}
});
@@ -181,7 +181,7 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
final VM secondServerVM = Host.getHost(0).getVM(secondServerIdx);
secondServerVM.invoke(new CacheSerializableRunnable("Create second bridge server") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("[testRegisterInterestFailover] Create second bridge server");
+ LogWriterUtils.getLogWriter().info("[testRegisterInterestFailover] Create second bridge server");
getSystem();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
@@ -194,15 +194,15 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
bridgeServerPort = startBridgeServer(0);
}
catch (IOException e) {
- LogWriterSupport.getLogWriter().error("startBridgeServer threw IOException", e);
+ LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
fail("startBridgeServer threw IOException " + e.getMessage());
}
assertTrue(bridgeServerPort != 0);
- LogWriterSupport.getLogWriter().info("[testRegisterInterestFailover] " +
+ LogWriterUtils.getLogWriter().info("[testRegisterInterestFailover] " +
"secondServer port=" + bridgeServerPort);
- LogWriterSupport.getLogWriter().info("[testRegisterInterestFailover] " +
+ LogWriterUtils.getLogWriter().info("[testRegisterInterestFailover] " +
"secondServer memberId=" + getMemberId());
}
});
@@ -224,7 +224,7 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
});
// create the bridge client
- LogWriterSupport.getLogWriter().info("[testBug35654] create bridge client");
+ LogWriterUtils.getLogWriter().info("[testBug35654] create bridge client");
Properties config = new Properties();
config.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
config.setProperty(DistributionConfig.LOCATORS_NAME, "");
@@ -234,9 +234,9 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
- LogWriterSupport.getLogWriter().info("[testRegisterInterestFailover] creating connection pool");
+ LogWriterUtils.getLogWriter().info("[testRegisterInterestFailover] creating connection pool");
boolean establishCallbackConnection = true;
- final PoolImpl p = (PoolImpl)ClientServerTestCase.configureConnectionPool(factory, NetworkSupport.getServerHostName(host), ports, establishCallbackConnection, -1, -1, null);
+ final PoolImpl p = (PoolImpl)ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host), ports, establishCallbackConnection, -1, -1, null);
final Region region1 = createRootRegion(regionName1, factory.create());
final Region region2 = createRootRegion(regionName2, factory.create());
@@ -311,7 +311,7 @@ public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {
startBridgeServer(ports[secondServerIdx]);
}
catch (IOException e) {
- LogWriterSupport.getLogWriter().error("startBridgeServer threw IOException", e);
+ LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
fail("startBridgeServer threw IOException " + e.getMessage());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerCCEDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerCCEDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerCCEDUnitTest.java
index 7fdd975..fb319b6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerCCEDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerCCEDUnitTest.java
@@ -46,8 +46,8 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientNotifier;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -156,19 +156,19 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
createEntries(vm0);
destroyEntries(vm0);
- LogWriterSupport.getLogWriter().info("***************** register interest on all keys");
+ LogWriterUtils.getLogWriter().info("***************** register interest on all keys");
createClientRegion(vm2, name, port, true);
registerInterest(vm2);
ensureAllTombstonesPresent(vm2);
- LogWriterSupport.getLogWriter().info("***************** clear cache and register interest on one key, Object0");
+ LogWriterUtils.getLogWriter().info("***************** clear cache and register interest on one key, Object0");
clearLocalCache(vm2);
registerInterestOneKey(vm2, "Object0");
List<String> keys = new ArrayList(1);
keys.add("Object0");
ensureAllTombstonesPresent(vm2, keys);
- LogWriterSupport.getLogWriter().info("***************** clear cache and register interest on four keys");
+ LogWriterUtils.getLogWriter().info("***************** clear cache and register interest on four keys");
clearLocalCache(vm2);
keys = new ArrayList(4);
for (int i=0; i<4; i++) {
@@ -177,12 +177,12 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
registerInterest(vm2, keys);
ensureAllTombstonesPresent(vm2, keys);
- LogWriterSupport.getLogWriter().info("***************** clear cache and register interest with regex on four keys");
+ LogWriterUtils.getLogWriter().info("***************** clear cache and register interest with regex on four keys");
clearLocalCache(vm2);
registerInterestRegex(vm2, "Object[0-3]");
ensureAllTombstonesPresent(vm2, keys);
- LogWriterSupport.getLogWriter().info("***************** fetch entries with getAll()");
+ LogWriterUtils.getLogWriter().info("***************** fetch entries with getAll()");
clearLocalCache(vm2);
getAll(vm2);
ensureAllTombstonesPresent(vm2);
@@ -209,19 +209,19 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
createEntries(vm0);
invalidateEntries(vm0);
- LogWriterSupport.getLogWriter().info("***************** register interest on all keys");
+ LogWriterUtils.getLogWriter().info("***************** register interest on all keys");
createClientRegion(vm2, name, port, true);
registerInterest(vm2);
ensureAllInvalidsPresent(vm2);
- LogWriterSupport.getLogWriter().info("***************** clear cache and register interest on one key, Object0");
+ LogWriterUtils.getLogWriter().info("***************** clear cache and register interest on one key, Object0");
clearLocalCache(vm2);
registerInterestOneKey(vm2, "Object0");
List<String> keys = new ArrayList(1);
keys.add("Object0");
ensureAllInvalidsPresent(vm2, keys);
- LogWriterSupport.getLogWriter().info("***************** clear cache and register interest on four keys");
+ LogWriterUtils.getLogWriter().info("***************** clear cache and register interest on four keys");
clearLocalCache(vm2);
keys = new ArrayList(4);
for (int i=0; i<4; i++) {
@@ -230,12 +230,12 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
registerInterest(vm2, keys);
ensureAllInvalidsPresent(vm2, keys);
- LogWriterSupport.getLogWriter().info("***************** clear cache and register interest with regex on four keys");
+ LogWriterUtils.getLogWriter().info("***************** clear cache and register interest with regex on four keys");
clearLocalCache(vm2);
registerInterestRegex(vm2, "Object[0-3]");
ensureAllInvalidsPresent(vm2, keys);
- LogWriterSupport.getLogWriter().info("***************** fetch entries with getAll()");
+ LogWriterUtils.getLogWriter().info("***************** fetch entries with getAll()");
clearLocalCache(vm2);
getAll(vm2);
ensureAllInvalidsPresent(vm2);
@@ -482,8 +482,8 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
@Override
public boolean done() {
- LogWriterSupport.getLogWriter().info("tombstone count = " + TestRegion.getTombstoneCount());
- LogWriterSupport.getLogWriter().info("region size = " + TestRegion.size());
+ LogWriterUtils.getLogWriter().info("tombstone count = " + TestRegion.getTombstoneCount());
+ LogWriterUtils.getLogWriter().info("region size = " + TestRegion.size());
return TestRegion.getTombstoneCount() == 0 && TestRegion.size() == 0;
}
@@ -518,7 +518,7 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
// if (first) {
// ((LocalRegion)proxy.getHARegion()).dumpBackingMap();
// }
- LogWriterSupport.getLogWriter().info("queue size ("+size+") is still > 0 for " + proxy.getProxyID());
+ LogWriterUtils.getLogWriter().info("queue size ("+size+") is still > 0 for " + proxy.getProxyID());
return false;
}
}
@@ -526,7 +526,7 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
// also ensure that server regions have been cleaned up
int regionEntryCount = TestRegion.getRegionMap().size();
if (regionEntryCount > 0) {
- LogWriterSupport.getLogWriter().info("TestRegion has unexpected entries - all should have been GC'd but we have " + regionEntryCount);
+ LogWriterUtils.getLogWriter().info("TestRegion has unexpected entries - all should have been GC'd but we have " + regionEntryCount);
TestRegion.dumpBackingMap();
return false;
}
@@ -549,7 +549,7 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
vm.invoke(new SerializableCallable("check that GC did not happen") {
public Object call() throws Exception {
if (TestRegion.getTombstoneCount() == 0) {
- LogWriterSupport.getLogWriter().warning("region has no tombstones");
+ LogWriterUtils.getLogWriter().warning("region has no tombstones");
// TestRegion.dumpBackingMap();
throw new AssertionFailedError("expected to find tombstones but region is empty");
}
@@ -590,9 +590,9 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
SerializableCallable createRegion = new SerializableCallable() {
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port);
cf.setPoolSubscriptionEnabled(true);
- cf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ cf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cache = getClientCache(cf);
ClientRegionFactory crf = cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
crf.setConcurrencyChecksEnabled(ccEnabled);
@@ -611,14 +611,14 @@ public class ClientServerCCEDUnitTest extends CacheTestCase {
SerializableCallable createRegion = new SerializableCallable() {
public Object call() throws Exception {
ClientCacheFactory cf = new ClientCacheFactory();
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port1);
- cf.addPoolServer(NetworkSupport.getServerHostName(vm.getHost()), port2);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port1);
+ cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port2);
cf.setPoolSubscriptionEnabled(true);
cf.setPoolSubscriptionRedundancy(1);
// bug #50683 - secondary durable queue retains all GC messages
cf.set("durable-client-id", ""+vm.getPid());
cf.set("durable-client-timeout", "" + 200);
- cf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ cf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cache = getClientCache(cf);
ClientRegionFactory crf = cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
crf.setConcurrencyChecksEnabled(ccEnabled);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerTestCase.java
index c3c0a8c..5c2f8a2 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ClientServerTestCase.java
@@ -35,7 +35,7 @@ import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.AvailablePortHelper;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -208,7 +208,7 @@ public class ClientServerTestCase extends CacheTestCase {
boolean threadLocalCnxs, int lifetimeTimeout, int statisticInterval) {
if(AUTO_LOAD_BALANCE) {
- pf.addLocator(host,DistributedTestSupport.getDUnitLocatorPort());
+ pf.addLocator(host,DistributedTestUtils.getDUnitLocatorPort());
} else {
for(int z=0;z<ports.length;z++) {
pf.addServer(host,ports[z]);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ConcurrentLeaveDuringGIIDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ConcurrentLeaveDuringGIIDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ConcurrentLeaveDuringGIIDUnitTest.java
index f283bb8..c2f06c1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ConcurrentLeaveDuringGIIDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/ConcurrentLeaveDuringGIIDUnitTest.java
@@ -26,7 +26,7 @@ import com.gemstone.gemfire.internal.cache.InitialImageOperation;
import com.gemstone.gemfire.internal.cache.InitialImageOperation.GIITestHook;
import com.gemstone.gemfire.internal.cache.InitialImageOperation.GIITestHookType;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -157,7 +157,7 @@ public class ConcurrentLeaveDuringGIIDUnitTest extends CacheTestCase {
// ensure that the RVV has recorded the event
DistributedRegion r = (DistributedRegion)getCache().getRegion(regionName);
if (!r.getVersionVector().contains(Xid, 1)) {
- LogWriterSupport.getLogWriter().info("r's version vector is " + r.getVersionVector().fullToString());
+ LogWriterUtils.getLogWriter().info("r's version vector is " + r.getVersionVector().fullToString());
((LocalRegion)r).dumpBackingMap();
}
assertTrue(r.containsKey("keyFromX"));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DiskRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DiskRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DiskRegionDUnitTest.java
index 84b9385..817943a 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DiskRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DiskRegionDUnitTest.java
@@ -48,7 +48,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.lru.LRUCapacityController;
import com.gemstone.gemfire.internal.cache.lru.LRUStatistics;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -143,7 +143,7 @@ public class DiskRegionDUnitTest extends CacheTestCase {
flush(region);
- LogWriterSupport.getLogWriter().info("DEBUG: writes=" + diskStats.getWrites()
+ LogWriterUtils.getLogWriter().info("DEBUG: writes=" + diskStats.getWrites()
+ " reads=" + diskStats.getReads()
+ " evictions=" + lruStats.getEvictions()
+ " total=" + total
@@ -162,7 +162,7 @@ public class DiskRegionDUnitTest extends CacheTestCase {
assertNotNull(value);
assertEquals(0, ((int[]) value)[0]);
- LogWriterSupport.getLogWriter().info("DEBUG: writes=" + diskStats.getWrites()
+ LogWriterUtils.getLogWriter().info("DEBUG: writes=" + diskStats.getWrites()
+ " reads=" + diskStats.getReads()
+ " evictions=" + lruStats.getEvictions()
+ " total=" + total
@@ -374,7 +374,7 @@ public class DiskRegionDUnitTest extends CacheTestCase {
// DiskRegion dr = region.getDiskRegion();
LRUStatistics lruStats = getLRUStats(region);
for (int i = 0; lruStats.getEvictions() < 10; i++) {
- LogWriterSupport.getLogWriter().info("Put " + i);
+ LogWriterUtils.getLogWriter().info("Put " + i);
region.put(new Integer(i), new byte[1]);
}
@@ -435,7 +435,7 @@ public class DiskRegionDUnitTest extends CacheTestCase {
long evictions = lruStats.getEvictions();
- LogWriterSupport.getLogWriter().info("Destroying memory resident entries");
+ LogWriterUtils.getLogWriter().info("Destroying memory resident entries");
// Destroying each of these guys should have no effect on the disk
for (int i = total - 1; i >= evictions; i--) {
region.destroy(new Integer(i));
@@ -446,7 +446,7 @@ public class DiskRegionDUnitTest extends CacheTestCase {
// long startRemoves = diskStats.getRemoves();
- LogWriterSupport.getLogWriter().info("Destroying disk-resident entries. evictions=" + evictions);
+ LogWriterUtils.getLogWriter().info("Destroying disk-resident entries. evictions=" + evictions);
// Destroying each of these guys should cause a removal from disk
for (int i = ((int) evictions) - 1; i >= 0; i--) {
@@ -458,7 +458,7 @@ public class DiskRegionDUnitTest extends CacheTestCase {
assertEquals(evictions, lruStats.getEvictions());
- LogWriterSupport.getLogWriter().info("keys remaining in region: " + region.keys().size());
+ LogWriterUtils.getLogWriter().info("keys remaining in region: " + region.keys().size());
assertEquals(0, region.keys().size());
}
@@ -970,7 +970,7 @@ public class DiskRegionDUnitTest extends CacheTestCase {
// DiskRegion dr = region.getDiskRegion();
LRUStatistics lruStats = getLRUStats(region);
for (int i = 0; lruStats.getEvictions() < 10; i++) {
- LogWriterSupport.getLogWriter().info("Put " + i);
+ LogWriterUtils.getLogWriter().info("Put " + i);
region.put(new Integer(i), new byte[1]);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
index 10abe4e..98b997d 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -199,8 +199,8 @@ public class DistAckMapMethodsDUnitTest extends DistributedTestCase{
vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
obj1 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);//to make sure that vm1 region has the entry
obj2 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "removeMethod", objArr);
- LogWriterSupport.getLogWriter().fine("111111111"+obj1);
- LogWriterSupport.getLogWriter().fine("2222222222"+obj2);
+ LogWriterUtils.getLogWriter().fine("111111111"+obj1);
+ LogWriterUtils.getLogWriter().fine("2222222222"+obj2);
if (obj1 == null)
fail("region1.getMethod returned null");
if(!(obj1.equals(obj2))){
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedAckRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedAckRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedAckRegionDUnitTest.java
index 93ffa43..f957e49 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedAckRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedAckRegionDUnitTest.java
@@ -27,7 +27,7 @@ import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -62,7 +62,7 @@ public class DistributedAckRegionDUnitTest extends MultiVMRegionTestCase {
public Properties getDistributedSystemProperties() {
Properties p = new Properties();
p.put(DistributionConfig.STATISTIC_SAMPLING_ENABLED_NAME, "true");
- p.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ p.put(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
return p;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
index 0036cf3..a1cc2cd 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -174,25 +174,25 @@ public class DistributedNoAckRegionCCEDUnitTest extends
if (event.isOriginRemote()) {
synchronized(this) {
while (ListenerBlocking) {
- LogWriterSupport.getLogWriter().info("blocking cache operations for " + event.getDistributedMember());
+ LogWriterUtils.getLogWriter().info("blocking cache operations for " + event.getDistributedMember());
blocked = true;
try {
wait();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- LogWriterSupport.getLogWriter().info("blocking cache listener interrupted");
+ LogWriterUtils.getLogWriter().info("blocking cache listener interrupted");
return;
}
}
}
if (blocked) {
- LogWriterSupport.getLogWriter().info("allowing cache operations for " + event.getDistributedMember());
+ LogWriterUtils.getLogWriter().info("allowing cache operations for " + event.getDistributedMember());
}
}
}
@Override
public void close() {
- LogWriterSupport.getLogWriter().info("closing blocking listener");
+ LogWriterUtils.getLogWriter().info("closing blocking listener");
ListenerBlocking = false;
synchronized(this) {
notifyAll();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionDUnitTest.java
index adf0ba9..1d680f0 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionDUnitTest.java
@@ -31,9 +31,9 @@ import com.gemstone.gemfire.internal.cache.StateFlushOperation;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -184,12 +184,12 @@ public class DistributedNoAckRegionDUnitTest
SerializableRunnable create = new
CacheSerializableRunnable("Create Mirrored Region") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("testBug30705: Start creating Mirrored Region");
+ LogWriterUtils.getLogWriter().info("testBug30705: Start creating Mirrored Region");
AttributesFactory factory =
new AttributesFactory(getRegionAttributes());
factory.setDataPolicy(DataPolicy.REPLICATE);
createRegion(name, factory.create());
- LogWriterSupport.getLogWriter().info("testBug30705: Finished creating Mirrored Region");
+ LogWriterUtils.getLogWriter().info("testBug30705: Finished creating Mirrored Region");
}
};
@@ -201,14 +201,14 @@ public class DistributedNoAckRegionDUnitTest
Object key = new Integer(0x42);
Object value = new byte[0];
assertNotNull(value);
- LogWriterSupport.getLogWriter().info("testBug30705: Started Distributed NoAck Puts");
+ LogWriterUtils.getLogWriter().info("testBug30705: Started Distributed NoAck Puts");
for (int i = 0; i < NUM_PUTS; i++) {
if (stopPutting) {
- LogWriterSupport.getLogWriter().info("testBug30705: Interrupted Distributed Ack Puts after " + i + " PUTS");
+ LogWriterUtils.getLogWriter().info("testBug30705: Interrupted Distributed Ack Puts after " + i + " PUTS");
break;
}
if ((i % 1000) == 0) {
- LogWriterSupport.getLogWriter().info("testBug30705: modification #" + i);
+ LogWriterUtils.getLogWriter().info("testBug30705: modification #" + i);
}
rgn.put(key, value);
}
@@ -220,18 +220,18 @@ public class DistributedNoAckRegionDUnitTest
vm0.invoke(new CacheSerializableRunnable("Put data") {
public void run2() throws CacheException {
- LogWriterSupport.getLogWriter().info("testBug30705: starting initial data load");
+ LogWriterUtils.getLogWriter().info("testBug30705: starting initial data load");
Region region =
getRootRegion().getSubregion(name);
final byte[] value = new byte[valueSize];
Arrays.fill(value, (byte)0x42);
for (int i = 0; i < numEntries; i++) {
if ((i % 1000) == 0) {
- LogWriterSupport.getLogWriter().info("testBug30705: initial put #" + i);
+ LogWriterUtils.getLogWriter().info("testBug30705: initial put #" + i);
}
region.put(new Integer(i), value);
}
- LogWriterSupport.getLogWriter().info("testBug30705: finished initial data load");
+ LogWriterUtils.getLogWriter().info("testBug30705: finished initial data load");
}
});
@@ -240,19 +240,19 @@ public class DistributedNoAckRegionDUnitTest
// do initial image
try {
- LogWriterSupport.getLogWriter().info("testBug30705: before the critical create");
+ LogWriterUtils.getLogWriter().info("testBug30705: before the critical create");
vm2.invoke(create);
- LogWriterSupport.getLogWriter().info("testBug30705: after the critical create");
+ LogWriterUtils.getLogWriter().info("testBug30705: after the critical create");
} finally {
// test passes if this does not hang
- LogWriterSupport.getLogWriter().info("testBug30705: INTERRUPTING Distributed NoAck Puts after GetInitialImage");
+ LogWriterUtils.getLogWriter().info("testBug30705: INTERRUPTING Distributed NoAck Puts after GetInitialImage");
vm0.invoke(new SerializableRunnable("Interrupt Puts") {
public void run() {
- LogWriterSupport.getLogWriter().info("testBug30705: interrupting putter");
+ LogWriterUtils.getLogWriter().info("testBug30705: interrupting putter");
stopPutting = true;
}
});
- Threads.join(async, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
// wait for overflow queue to quiesce before continuing
vm2.invoke(new SerializableRunnable("Wait for Overflow Queue") {
public void run() {
@@ -274,7 +274,7 @@ public class DistributedNoAckRegionDUnitTest
}
});
} // finally
- LogWriterSupport.getLogWriter().info("testBug30705: at end of test");
+ LogWriterUtils.getLogWriter().info("testBug30705: at end of test");
if (async.exceptionOccurred()) {
Assert.fail("Got exception", async.getException());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DynamicRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DynamicRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DynamicRegionDUnitTest.java
index c507437..3d62b31 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DynamicRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/DynamicRegionDUnitTest.java
@@ -32,7 +32,7 @@ import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.internal.OSProcess;
import com.gemstone.gemfire.internal.cache.xmlcache.CacheCreation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -74,7 +74,7 @@ public class DynamicRegionDUnitTest extends CacheTestCase {
*/
@Override
protected final void preTearDownCacheTestCase() throws Exception {
- LogWriterSupport.getLogWriter().info("Running tearDown in " + this);
+ LogWriterUtils.getLogWriter().info("Running tearDown in " + this);
try {
//Asif destroy dynamic regions at the end of the test
CacheSerializableRunnable destroyDynRegn = new CacheSerializableRunnable("Destroy Dynamic regions") {
@@ -98,7 +98,7 @@ public class DynamicRegionDUnitTest extends CacheTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().severe("tearDown in " + this + " failed due to " + t);
+ LogWriterUtils.getLogWriter().severe("tearDown in " + this + " failed due to " + t);
}
finally {
try {
@@ -109,11 +109,11 @@ public class DynamicRegionDUnitTest extends CacheTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().severe("tearDown in " + this + " failed to disconnect all DS due to " + t);
+ LogWriterUtils.getLogWriter().severe("tearDown in " + this + " failed to disconnect all DS due to " + t);
}
}
if (! DynamicRegionFactory.get().isClosed()) {
- LogWriterSupport.getLogWriter().severe("DynamicRegionFactory not closed!", new Exception());
+ LogWriterUtils.getLogWriter().severe("DynamicRegionFactory not closed!", new Exception());
}
}
@@ -231,7 +231,7 @@ public class DynamicRegionDUnitTest extends CacheTestCase {
DynamicRegionFactory.get().createDynamicRegion(drFullPath, "subregion" + i);
}
- LogWriterSupport.getLogWriter().info("testPeerRegion - check #1 make sure other region has new dynamic subregion");
+ LogWriterUtils.getLogWriter().info("testPeerRegion - check #1 make sure other region has new dynamic subregion");
checkForRegionOtherVm(drFullPath, true);
// spot check the subregions
@@ -240,13 +240,13 @@ public class DynamicRegionDUnitTest extends CacheTestCase {
// now see if OTHER can recreate which should fetch meta-info from controller
recreateOtherVm();
- LogWriterSupport.getLogWriter().info("testPeerRegion - check #2 make sure other region has dynamic region after restarting through getInitialImage");
+ LogWriterUtils.getLogWriter().info("testPeerRegion - check #2 make sure other region has dynamic region after restarting through getInitialImage");
checkForRegionOtherVm(drFullPath, true);
// now close the controller and see if OTHER can still fetch meta-info from disk
closeCache();
recreateOtherVm();
- LogWriterSupport.getLogWriter().info("testPeerRegion - check #3 make sure dynamic region can be recovered from disk");
+ LogWriterUtils.getLogWriter().info("testPeerRegion - check #3 make sure dynamic region can be recovered from disk");
checkForRegionOtherVm(drFullPath, true);
for (int i=0; i<10; i++) {
checkForSubregionOtherVm(drFullPath + "/subregion" + i, true);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalLockingDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalLockingDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalLockingDUnitTest.java
index a1d3ea9..0830b7e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalLockingDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalLockingDUnitTest.java
@@ -29,7 +29,7 @@ import com.gemstone.gemfire.cache.RegionExistsException;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.TimeoutException;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -72,13 +72,13 @@ public class GlobalLockingDUnitTest extends CacheTestCase {
* Tests for 32356 R2 tryLock w/ 0 timeout broken in Distributed Lock Service
*/
public void testBug32356() throws Exception {
- LogWriterSupport.getLogWriter().fine("[testBug32356]");
+ LogWriterUtils.getLogWriter().fine("[testBug32356]");
Host host = Host.getHost(0);
final String name = this.getUniqueName();
final Object key = "32356";
// lock/unlock '32356' in all vms... (make all vms aware of token)
- LogWriterSupport.getLogWriter().fine("[testBug32356] lock/unlock '32356' in all vms");
+ LogWriterUtils.getLogWriter().fine("[testBug32356] lock/unlock '32356' in all vms");
for (int i = 0; i < 4; i++) {
final int vm = i;
host.getVM(vm).invoke(new CacheSerializableRunnable("testBug32356_step1") {
@@ -93,7 +93,7 @@ public class GlobalLockingDUnitTest extends CacheTestCase {
}
// attempt try-lock of zero wait time in all vms
- LogWriterSupport.getLogWriter().fine("[testBug32356] attempt try-lock of zero wait time in all vms");
+ LogWriterUtils.getLogWriter().fine("[testBug32356] attempt try-lock of zero wait time in all vms");
for (int i = 0; i < 4; i++) {
final int vm = i;
host.getVM(vm).invoke(new CacheSerializableRunnable("testBug32356_step2") {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalRegionDUnitTest.java
index 8b7dc98..008f951 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/GlobalRegionDUnitTest.java
@@ -34,9 +34,9 @@ import com.gemstone.gemfire.cache.TimeoutException;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -256,7 +256,7 @@ public class GlobalRegionDUnitTest extends MultiVMRegionTestCase {
Thread thread = new Thread(group, new Runnable() {
public void run() {
try {
- LogWriterSupport.getLogWriter().info("testSynchronousIncrements." + this);
+ LogWriterUtils.getLogWriter().info("testSynchronousIncrements." + this);
final Random rand = new Random(System.identityHashCode(this));
try {
Region region = getRootRegion().getSubregion(name);
@@ -280,7 +280,7 @@ public class GlobalRegionDUnitTest extends MultiVMRegionTestCase {
region.put(key, value);
assertEquals(value, region.get(key));
- LogWriterSupport.getLogWriter().info("testSynchronousIncrements." +
+ LogWriterUtils.getLogWriter().info("testSynchronousIncrements." +
this + ": " + key + " -> " + value);
lock.unlock();
}
@@ -296,7 +296,7 @@ public class GlobalRegionDUnitTest extends MultiVMRegionTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().info("testSynchronousIncrements." +
+ LogWriterUtils.getLogWriter().info("testSynchronousIncrements." +
this + " caught Throwable", t);
}
}
@@ -306,7 +306,7 @@ public class GlobalRegionDUnitTest extends MultiVMRegionTestCase {
}
for (int i = 0; i < threads.length; i++) {
- Threads.join(threads[i], 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(threads[i], 30 * 1000);
}
}
};
@@ -317,7 +317,7 @@ public class GlobalRegionDUnitTest extends MultiVMRegionTestCase {
}
for (int i = 0; i < vmCount; i++) {
- Threads.join(invokes[i], 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(invokes[i], 5 * 60 * 1000);
if (invokes[i].exceptionOccurred()) {
Assert.fail("invocation failed", invokes[i].getException());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/LRUEvictionControllerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/LRUEvictionControllerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/LRUEvictionControllerDUnitTest.java
index 1246077..b3a85bc 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/LRUEvictionControllerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/LRUEvictionControllerDUnitTest.java
@@ -46,7 +46,7 @@ import com.gemstone.gemfire.internal.cache.control.InternalResourceManager.Resou
import com.gemstone.gemfire.internal.cache.lru.HeapEvictor;
import com.gemstone.gemfire.internal.cache.lru.LRUStatistics;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -304,7 +304,7 @@ public class LRUEvictionControllerDUnitTest extends CacheTestCase {
CacheListenerAdapter() {
public void afterCreate(EntryEvent event) {
try {
- LogWriterSupport.getLogWriter().info("AFTER CREATE");
+ LogWriterUtils.getLogWriter().info("AFTER CREATE");
region.put(key, value2);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/MultiVMRegionTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/MultiVMRegionTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/MultiVMRegionTestCase.java
index 9a29df0..d3b065c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/MultiVMRegionTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/MultiVMRegionTestCase.java
@@ -114,12 +114,12 @@ import com.gemstone.gemfire.internal.offheap.MemoryChunkWithRefCount;
import com.gemstone.gemfire.internal.offheap.SimpleMemoryAllocatorImpl;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.RMIException;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -462,7 +462,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
Object value = e.getNewValue();
assertNotNull(value);
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("++ Adding " + value);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("++ Adding " + value);
queue.put(value);
} catch (InterruptedException ex) {
@@ -482,9 +482,9 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
(LinkedBlockingQueue) region.getUserAttribute();
for (int i = 0; i <= lastNumber; i++) {
try {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("++ Waiting for " + i);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("++ Waiting for " + i);
Integer value = (Integer) queue.take();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("++ Got " + value);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("++ Got " + value);
assertEquals(i, value.intValue());
} catch (InterruptedException ex) {
@@ -506,8 +506,8 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
});
- Threads.join(ai0, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
- Threads.join(ai1, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(ai0, 30 * 1000);
+ ThreadUtils.join(ai1, 30 * 1000);
if (ai0.exceptionOccurred()) {
com.gemstone.gemfire.test.dunit.Assert.fail("ai0 failed", ai0.getException());
@@ -983,7 +983,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
// changed from severe to fine because it is possible
// for this to return non-null on d-no-ack
// that is was invokeRepeatingIfNecessary is called
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().fine("invalidated entry has value of " + entry.getValue());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().fine("invalidated entry has value of " + entry.getValue());
}
assertNull(entry.getValue());
}
@@ -3316,7 +3316,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
assertTrue(index >= 0);
assertEquals(expectedValues.remove(index), event.getNewValue());
expectedKeys.remove(index);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("afterCreate called in " +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("afterCreate called in " +
"MirroredDataFromNonMirroredListener for key:" + event.getKey());
}
}
@@ -3437,7 +3437,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
Region.Entry entry1 = region.getEntry(key1);
if (!getRegionAttributes().getDataPolicy().withReplication()) {
if (entry1 != null) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("found entry " + entry1);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("found entry " + entry1);
}
assertNull(entry1);
}
@@ -4291,7 +4291,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.NORMAL);
factory.setSubscriptionAttributes(new SubscriptionAttributes(InterestPolicy.ALL));
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("MJT DEBUG: attrs0 are " + factory.create());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("MJT DEBUG: attrs0 are " + factory.create());
createRootRegion(factory.create());
}
{
@@ -4301,7 +4301,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
if (getRegionAttributes().getDataPolicy() == DataPolicy.NORMAL) {
factory.setDataPolicy(DataPolicy.PRELOADED);
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("MJT DEBUG: attrs1 are " + factory.create());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("MJT DEBUG: attrs1 are " + factory.create());
Region region = createRegion(name, factory.create());
}
finishCacheXml(name);
@@ -4380,7 +4380,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
// before the get initial image is complete.
for (int i = 1; i < NB1_NUM_ENTRIES; i += 2) {
Object key = new Integer(i);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Operation #"+i+" on key " + key);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Operation #"+i+" on key " + key);
switch (i % 6) {
case 1: // UPDATE
// use the current timestamp so we know when it happened
@@ -4438,28 +4438,28 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
});
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Before GetInitialImage, data policy is "+getRegionAttributes().getDataPolicy()+", scope is "+getRegionAttributes().getScope());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Before GetInitialImage, data policy is "+getRegionAttributes().getDataPolicy()+", scope is "+getRegionAttributes().getScope());
AsyncInvocation asyncGII = vm2.invokeAsync(create);
if (!getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
vm2.invoke(new SerializableRunnable("Set fast image processing") {
public void run() {
com.gemstone.gemfire.internal.cache.InitialImageOperation.slowImageProcessing = 0;
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("after async nonblocking ops complete");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("after async nonblocking ops complete");
}
// wait for GII to complete
- Threads.join(asyncGII, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncGII, 30 * 1000);
final long iiComplete = System.currentTimeMillis();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
if (getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
}
if (async.exceptionOccurred()) {
com.gemstone.gemfire.test.dunit.Assert.fail("async failed", async.getException());
@@ -4477,7 +4477,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
region.localDestroyRegion();
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("after localDestroyRegion");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("after localDestroyRegion");
// invoke repeating so noack regions wait for all updates to get processed
vm2.invokeRepeatingIfNecessary(new CacheSerializableRunnable("Verify entryCount") {
@@ -4500,7 +4500,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
if (entriesDumped) return;
entriesDumped = true;
- LogWriter logger = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter logger = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
logger.info("DUMPING Entries with values in VM that should have been destroyed:");
for (int i = 5; i < NB1_NUM_ENTRIES; i += 6) {
try {
@@ -4513,7 +4513,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
}
}, 5000);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("after verify entryCount");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("after verify entryCount");
vm2.invoke(new CacheSerializableRunnable("Verify keys/values & Nonblocking") {
@@ -4567,7 +4567,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": " + numConcurrent + " entries out of " + entryCount +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": " + numConcurrent + " entries out of " + entryCount +
" were updated concurrently with getInitialImage");
// make sure at least some of them were concurrent
if (region.getAttributes().getScope().isGlobal()) {
@@ -4583,7 +4583,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("after verify key/values");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("after verify key/values");
}
/**
@@ -4773,23 +4773,23 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
if (!getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
vm2.invoke(new SerializableRunnable("Set fast image processing") {
public void run() {
com.gemstone.gemfire.internal.cache.InitialImageOperation.slowImageProcessing = 0;
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("after async nonblocking ops complete");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("after async nonblocking ops complete");
}
// wait for GII to complete
- Threads.join(asyncGII, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncGII, 30 * 1000);
final long iiComplete = System.currentTimeMillis();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
if (getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
}
if (async.exceptionOccurred()) {
@@ -4830,7 +4830,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
if (entriesDumped) return;
entriesDumped = true;
- LogWriter logger = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter logger = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
logger.info("DUMPING Entries with values in VM that should have been destroyed:");
for (int i = 5; i < NB1_NUM_ENTRIES; i += 6) {
logger.info(i + "-->" +
@@ -4891,7 +4891,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": " + numConcurrent + " entries out of " + entryCount +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": " + numConcurrent + " entries out of " + entryCount +
" were updated concurrently with getInitialImage");
// make sure at least some of them were concurrent
{
@@ -5086,7 +5086,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
if (!getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
try {
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
} finally {
vm2.invoke(new SerializableRunnable("Set fast image processing") {
public void run() {
@@ -5094,16 +5094,16 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
});
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("after async nonblocking ops complete");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("after async nonblocking ops complete");
}
// wait for GII to complete
- Threads.join(asyncGII, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(asyncGII, 30 * 1000);
final long iiComplete = System.currentTimeMillis();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
if (getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
}
if (asyncGII.exceptionOccurred()) {
throw new Error("asyncGII failed", asyncGII.getException());
@@ -5144,7 +5144,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
if (entriesDumped) return;
entriesDumped = true;
- LogWriter logger = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter logger = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
logger.info("DUMPING Entries with values in VM that should have been destroyed:");
for (int i = 5; i < NB1_NUM_ENTRIES; i += 6) {
logger.info(i + "-->" +
@@ -5210,7 +5210,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
}
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": " + numConcurrent + " entries out of " + entryCount +
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": " + numConcurrent + " entries out of " + entryCount +
" were updated concurrently with getInitialImage");
// [sumedh] Occasionally fails. Do these assertions really make sense?
@@ -5409,7 +5409,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
});
if (getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
if (async.exceptionOccurred()) {
com.gemstone.gemfire.test.dunit.Assert.fail("async invocation failed", async.getException());
}
@@ -5419,16 +5419,16 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
com.gemstone.gemfire.internal.cache.InitialImageOperation.slowImageProcessing = 0;
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("after async nonblocking ops complete");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("after async nonblocking ops complete");
}
// wait for GII to complete
//getLogWriter().info("starting wait for GetInitialImage Completion");
- Threads.join(asyncGII, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
+ ThreadUtils.join(asyncGII, 30 * 1000);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Complete GetInitialImage at: " + System.currentTimeMillis());
if (getRegionAttributes().getScope().isGlobal()) {
// wait for nonblocking operations to complete
- Threads.join(async, 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async, 30 * 1000);
}
if (async.exceptionOccurred()) {
com.gemstone.gemfire.test.dunit.Assert.fail("async failed", async.getException());
@@ -5465,10 +5465,10 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": before creates");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": before creates");
vm0.invoke(create);
vm1.invoke(create);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after creates");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after creates");
final Object key = "KEY";
final Object key2 = "KEY2";
@@ -5481,7 +5481,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
disconnectFromDS();
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm2 disconnect");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm2 disconnect");
try {
vm0.invoke(new CacheSerializableRunnable("Put int") {
@@ -5490,7 +5490,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
IntWrapper.IntWrapperSerializer serializer =
(IntWrapper.IntWrapperSerializer)
DataSerializer.register(c);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Registered serializer id:" + serializer.getId()
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Registered serializer id:" + serializer.getId()
+ " class:" + c.getName());
Region region = getRootRegion().getSubregion(name);
@@ -5500,7 +5500,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
assertTrue(serializer.wasInvoked);
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm0 put");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm0 put");
SerializableRunnable get = new CacheSerializableRunnable("Get int") {
public void run2() throws CacheException {
@@ -5537,7 +5537,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
};
vm1.invoke(get);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm1 get");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm1 get");
// Make sure that VMs that connect after registration can get the
// serializer
@@ -5551,7 +5551,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
});
vm2.invoke(create);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm2 create");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm2 create");
vm2.invoke(new CacheSerializableRunnable("Put long") {
public void run2() throws CacheException {
Region region = getRootRegion().getSubregion(name);
@@ -5565,9 +5565,9 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
assertTrue(serializer.wasInvoked);
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm2 put");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm2 put");
vm2.invoke(get);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm2 get");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm2 get");
SerializableRunnable get2 = new CacheSerializableRunnable("Get long") {
public void run2() throws CacheException {
@@ -5579,9 +5579,9 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
};
vm0.invoke(get2);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm0 get2");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm0 get2");
vm1.invoke(get2);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after vm1 get2");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after vm1 get2");
// wait a little while for other netsearch requests to return
// before unregistering the serializers that will be needed to process these
@@ -5589,7 +5589,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
} finally {
Wait.pause(1500);
unregisterAllSerializers();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info(name + ": after unregister");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info(name + ": after unregister");
}
}
@@ -5712,7 +5712,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
* system.
*/
private static void unregisterAllSerializers() {
- DistributedTestSupport.unregisterAllDataSerializersFromAllVms();
+ DistributedTestUtils.unregisterAllDataSerializersFromAllVms();
cleanupAllVms();
}
@@ -8175,11 +8175,11 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
long start = System.currentTimeMillis();
RegionVersionVector vm0vv = getVersionVector(vm0);
long end = System.currentTimeMillis();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("version vector transmission took " + (end-start) + " ms");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("vm0 vector = " + vm0vv);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("version vector transmission took " + (end-start) + " ms");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("vm0 vector = " + vm0vv);
- RegionVersionVector vm1vv = getVersionVector(vm1); com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("vm1 vector = " + vm1vv);
- RegionVersionVector vm2vv = getVersionVector(vm2); com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("vm2 vector = " + vm2vv);
+ RegionVersionVector vm1vv = getVersionVector(vm1); com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("vm1 vector = " + vm1vv);
+ RegionVersionVector vm2vv = getVersionVector(vm2); com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("vm2 vector = " + vm2vv);
Map<String, VersionTag> vm0Versions = (Map<String, VersionTag>)vm0.invoke(this.getClass(), "getCCRegionVersions");
Map<String, VersionTag> vm1Versions = (Map<String, VersionTag>)vm1.invoke(this.getClass(), "getCCRegionVersions");
@@ -8515,7 +8515,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
Wait.waitForCriterion(waitForExpiration, TombstoneService.REPLICATED_TOMBSTONE_TIMEOUT+10000, 1000, true);
} catch (AssertionFailedError e) {
CCRegion.dumpBackingMap();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("tombstone service state: " + CCRegion.getCache().getTombstoneService());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("tombstone service state: " + CCRegion.getCache().getTombstoneService());
throw e;
}
}
@@ -8567,7 +8567,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
}
} catch (AssertionFailedError e) {
CCRegion.dumpBackingMap();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("tombstone service state: " + CCRegion.getCache().getTombstoneService());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("tombstone service state: " + CCRegion.getCache().getTombstoneService());
throw e;
} catch (CacheException e) {
com.gemstone.gemfire.test.dunit.Assert.fail("while performing create/destroy operations", e);
@@ -8946,7 +8946,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
public void close() {
}
public Object load(LoaderHelper helper) throws CacheLoaderException {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("The test CacheLoader has been invoked for key '" + helper.getKey() + "'");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("The test CacheLoader has been invoked for key '" + helper.getKey() + "'");
return "loadedValue";
}
});
@@ -9034,7 +9034,7 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
for (int i=0; i<100; i++) {
RegionEntry entry = r.getRegionEntry("cckey"+i);
int stamp = entry.getVersionStamp().getEntryVersion();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("checking key cckey" + i + " having version " + stamp + " entry=" + entry);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("checking key cckey" + i + " having version " + stamp + " entry=" + entry);
assertEquals(2, stamp);
assertEquals(result.get("cckey"+i), i+1);
}
@@ -9101,13 +9101,13 @@ public abstract class MultiVMRegionTestCase extends RegionTestCase {
public static byte[] getCCRegionVersionVector() throws Exception {
Object id = getMemberId();
int vm = VM.getCurrentVMNum();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("vm" + vm + " with id " + id + " copying " + CCRegion.getVersionVector().fullToString());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("vm" + vm + " with id " + id + " copying " + CCRegion.getVersionVector().fullToString());
RegionVersionVector vector = CCRegion.getVersionVector().getCloneForTransmission();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("clone is " + vector);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("clone is " + vector);
HeapDataOutputStream dos = new HeapDataOutputStream(3000, Version.CURRENT);
DataSerializer.writeObject(vector, dos);
byte[] bytes = dos.toByteArray();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("serialized size is " + bytes.length);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("serialized size is " + bytes.length);
return bytes;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PRBucketSynchronizationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PRBucketSynchronizationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PRBucketSynchronizationDUnitTest.java
index 52ac05a..a68bd46 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PRBucketSynchronizationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PRBucketSynchronizationDUnitTest.java
@@ -40,10 +40,10 @@ import com.gemstone.gemfire.internal.cache.VMCachedDeserializable;
import com.gemstone.gemfire.internal.cache.versions.VMVersionTag;
import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
-import com.gemstone.gemfire.test.dunit.DistributedSystemSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -132,7 +132,7 @@ public class PRBucketSynchronizationDUnitTest extends CacheTestCase {
// Now we crash the primary bucket owner simulating death during distribution.
// The backup buckets should perform a delta-GII for the lost member and
// get back in sync
- DistributedSystemSupport.crashDistributedSystem(primaryOwner);
+ DistributedTestUtils.crashDistributedSystem(primaryOwner);
for (VM vm: verifyVMs) {
verifySynchronized(vm, primaryID);
@@ -200,7 +200,7 @@ public class PRBucketSynchronizationDUnitTest extends CacheTestCase {
tag.setEntryVersion(1);
tag.setIsRemoteForTesting();
EntryEventImpl event = EntryEventImpl.create(bucket, Operation.CREATE, "Object3", true, primary, true, false);
- LogWriterSupport.getLogWriter().info("applying this event to the cache: " + event);
+ LogWriterUtils.getLogWriter().info("applying this event to the cache: " + event);
event.setNewValue(new VMCachedDeserializable("value3", 12));
event.setVersionTag(tag);
bucket.getRegionMap().basicPut(event, System.currentTimeMillis(), true, false, null, false, false);
@@ -215,12 +215,12 @@ public class PRBucketSynchronizationDUnitTest extends CacheTestCase {
event = EntryEventImpl.create(bucket, Operation.CREATE, "Object5", true, primary, true, false);
event.setNewValue(Token.TOMBSTONE);
event.setVersionTag(tag);
- LogWriterSupport.getLogWriter().info("applying this event to the cache: " + event);
+ LogWriterUtils.getLogWriter().info("applying this event to the cache: " + event);
bucket.getRegionMap().basicPut(event, System.currentTimeMillis(), true, false, null, false, false);
event.release();
bucket.dumpBackingMap();
- LogWriterSupport.getLogWriter().info("bucket version vector is now " + bucket.getVersionVector().fullToString());
+ LogWriterUtils.getLogWriter().info("bucket version vector is now " + bucket.getVersionVector().fullToString());
assertTrue("bucket should hold entry Object3 now", bucket.containsKey("Object3"));
return true;
}
@@ -237,12 +237,12 @@ public class PRBucketSynchronizationDUnitTest extends CacheTestCase {
boolean dumped = false;
public boolean done() {
if (TestRegion.getCache().getDistributionManager().isCurrentMember(crashedMember)) {
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
if (!TestRegion.containsKey("Object3")) {
waitingFor = "entry for Object3 not found";
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
RegionEntry re = bucket.getRegionMap().getEntry("Object5");
@@ -252,7 +252,7 @@ public class PRBucketSynchronizationDUnitTest extends CacheTestCase {
bucket.dumpBackingMap();
}
waitingFor = "entry for Object5 not found";
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
if (!re.isTombstone()) {
@@ -261,7 +261,7 @@ public class PRBucketSynchronizationDUnitTest extends CacheTestCase {
bucket.dumpBackingMap();
}
waitingFor = "Object5 is not a tombstone but should be: " + re;
- LogWriterSupport.getLogWriter().info(waitingFor);
+ LogWriterUtils.getLogWriter().info(waitingFor);
return false;
}
return true;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PartitionedRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PartitionedRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PartitionedRegionDUnitTest.java
index 50c8353..0408eb7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PartitionedRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/cache30/PartitionedRegionDUnitTest.java
@@ -188,7 +188,7 @@ public class PartitionedRegionDUnitTest extends MultiVMRegionTestCase {
fact.addCacheListener(new CacheListenerAdapter(){
@Override
public void afterInvalidate(EntryEvent event) {
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("afterInvalidate invoked with " + event);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("afterInvalidate invoked with " + event);
InvalidateInvoked = true;
}
});
[17/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java
index 96f02fa..e48516c 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARQueueNewImplDUnitTest.java
@@ -55,7 +55,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.HAEventWrapper;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -354,11 +354,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -392,11 +392,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -431,11 +431,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -468,11 +468,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -510,11 +510,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -565,11 +565,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "40000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -621,11 +621,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
Integer port3 = (Integer)serverVM0.invoke(HARQueueNewImplDUnitTest.class,
"createOneMoreBridgeServer", new Object[] { Boolean.TRUE });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), port3, "0");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), port3, "0");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -652,8 +652,8 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
Integer port3 = (Integer)serverVM0.invoke(HARQueueNewImplDUnitTest.class,
"createOneMoreBridgeServer", new Object[] { Boolean.FALSE });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1", Boolean.TRUE);
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1", Boolean.TRUE);
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, port3, new Integer(PORT2), "1", Boolean.TRUE });
@@ -690,11 +690,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -740,11 +740,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -784,11 +784,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM0.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "30000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -822,11 +822,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
*/
public void testCMRNotReturnedByRootRegionsMethod() throws Exception {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
@@ -871,11 +871,11 @@ public class HARQueueNewImplDUnitTest extends DistributedTestCase {
serverVM1.invoke(ConflationDUnitTest.class, "setIsSlowStart",
new Object[] { "60000" });
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
- final String client1Host = NetworkSupport.getServerHostName(clientVM1.getHost());
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2), "1");
+ final String client1Host = NetworkUtils.getServerHostName(clientVM1.getHost());
clientVM1.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client1Host, new Integer(PORT1), new Integer(PORT2), "1" });
- final String client2Host = NetworkSupport.getServerHostName(clientVM2.getHost());
+ final String client2Host = NetworkUtils.getServerHostName(clientVM2.getHost());
clientVM2.invoke(HARQueueNewImplDUnitTest.class, "createClientCache",
new Object[] { client2Host, new Integer(PORT1), new Integer(PORT2), "1" });
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java
index 5f63b21..7b410d6 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueDUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.internal.cache.HARegion;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -854,7 +854,7 @@ public class HARegionQueueDUnitTest extends DistributedTestCase
if (opThreads[i].isInterrupted()) {
fail("Test failed because thread encountered exception");
}
- Threads.join(opThreads[i], 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(opThreads[i], 30 * 1000);
}
}
};
@@ -969,7 +969,7 @@ public class HARegionQueueDUnitTest extends DistributedTestCase
}
};
Wait.waitForCriterion(ev, 30 * 1000, 200, true);
- Threads.join(createQueuesThread, 300 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(createQueuesThread, 300 * 1000);
}
};
@@ -983,7 +983,7 @@ public class HARegionQueueDUnitTest extends DistributedTestCase
if (opThreads[0].isInterrupted()) {
fail("The test has failed as it encountered interrupts in puts & takes");
}
- Threads.join(opThreads[0], 30 * 1000, com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter());
+ ThreadUtils.join(opThreads[0], 30 * 1000);
}
};
vm0.invoke(joinWithThread);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java
index 8f9ba9c..48da630 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HARegionQueueJUnitTest.java
@@ -54,7 +54,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.cache.Conflatable;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.internal.cache.RegionQueue;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -292,7 +292,7 @@ public class HARegionQueueJUnitTest
// call join on the put-threads so that this thread waits till they complete
// before doing verfication
for (i = 0; i < TOTAL_PUT_THREADS; i++) {
- Threads.join(putThreads[i], 30 * 1000, null);
+ ThreadUtils.join(putThreads[i], 30 * 1000);
}
assertFalse(encounteredException);
@@ -384,7 +384,7 @@ public class HARegionQueueJUnitTest
long startTime = System.currentTimeMillis();
for (int k = 0; k < threads.length; k++) {
- Threads.join(threads[k], 60 * 1000, null);
+ ThreadUtils.join(threads[k], 60 * 1000);
}
long totalTime = System.currentTimeMillis() - startTime;
@@ -1252,8 +1252,8 @@ public class HARegionQueueJUnitTest
};
thread1.start();
thread2.start();
- Threads.join(thread1, 30 * 1000, null);
- Threads.join(thread2, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000);
+ ThreadUtils.join(thread2, 30 * 1000);
List list2 = HARegionQueue.createMessageListForTesting();
Iterator iterator = list1.iterator();
boolean doOnce = false;
@@ -1363,8 +1363,8 @@ public class HARegionQueueJUnitTest
};
thread1.start();
thread2.start();
- Threads.join(thread1, 30 * 1000, null);
- Threads.join(thread2, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000);
+ ThreadUtils.join(thread2, 30 * 1000);
List list2 = HARegionQueue.createMessageListForTesting();
Iterator iterator = list1.iterator();
boolean doOnce = false;
@@ -1483,8 +1483,8 @@ public class HARegionQueueJUnitTest
};
thread1.start();
thread2.start();
- Threads.join(thread1, 30 * 1000, null);
- Threads.join(thread2, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000);
+ ThreadUtils.join(thread2, 30 * 1000);
List list2 = HARegionQueue.createMessageListForTesting();
Iterator iterator = list1.iterator();
boolean doOnce = true;
@@ -1649,8 +1649,8 @@ public class HARegionQueueJUnitTest
};
thread1.start();
thread2.start();
- Threads.join(thread1, 30 * 1000, null);
- Threads.join(thread2, 30 * 1000, null);
+ ThreadUtils.join(thread1, 30 * 1000);
+ ThreadUtils.join(thread2, 30 * 1000);
List list2 = HARegionQueue.createMessageListForTesting();
Iterator iterator = list1.iterator();
boolean doOnce = true;
@@ -1769,7 +1769,7 @@ public class HARegionQueueJUnitTest
long startTime = System.currentTimeMillis();
for (int k = 0; k < 3; k++) {
- Threads.join(threads[k], 180 * 1000, null);
+ ThreadUtils.join(threads[k], 180 * 1000);
}
long totalTime = System.currentTimeMillis() - startTime;
@@ -1848,7 +1848,7 @@ public class HARegionQueueJUnitTest
long startTime = System.currentTimeMillis();
for (int k = 0; k < 3; k++) {
- Threads.join(threads[k], 60 * 1000, null);
+ ThreadUtils.join(threads[k], 60 * 1000);
}
long totalTime = System.currentTimeMillis() - startTime;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java
index 9823fa6..6dfcc02 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/HASlowReceiverDUnitTest.java
@@ -40,7 +40,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerTestUtil;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -251,7 +251,7 @@ public class HASlowReceiverDUnitTest extends DistributedTestCase {
public void testSlowClient() throws Exception {
setBridgeObeserverForAfterQueueDestroyMessage();
clientVM.invoke(HASlowReceiverDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT0),
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT0),
new Integer(PORT1), new Integer(PORT2), new Integer(2) });
clientVM.invoke(HASlowReceiverDUnitTest.class, "registerInterest");
// add expected socket exception string
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java
index 4e47c5f..1aa3642 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/OperationsPropagationDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -118,7 +118,7 @@ public class OperationsPropagationDUnitTest extends DistributedTestCase
PORT2 = ((Integer)server2.invoke(OperationsPropagationDUnitTest.class,
"createServerCache")).intValue();
client1.invoke(OperationsPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(host), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(host), new Integer(PORT2) });
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java
index 2176a1c..9201a5f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/PutAllDUnitTest.java
@@ -44,8 +44,8 @@ import com.gemstone.gemfire.internal.cache.EntryEventImpl;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -136,11 +136,11 @@ public class PutAllDUnitTest extends DistributedTestCase
PORT2 = ((Integer)server2.invoke(PutAllDUnitTest.class,
"createServerCache")).intValue();
client1.invoke(PutAllDUnitTest.class, "createClientCache1",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1) });
client2.invoke(PutAllDUnitTest.class, "createClientCache2",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT2) });
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT2) });
try {
- createClientCache2(NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT2));
+ createClientCache2(NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT2));
}
catch (Exception e) {
fail(" test failed due to "+e);
@@ -550,7 +550,7 @@ public class PutAllDUnitTest extends DistributedTestCase
public void afterCreate(EntryEvent event)
{
- LogWriterSupport.getLogWriter().fine(" entered after created with "+event.getKey());
+ LogWriterUtils.getLogWriter().fine(" entered after created with "+event.getKey());
boolean shouldNotify = false;
Object key = event.getKey();
if (key.equals(PUTALL_KEY1)) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java
index 0e3b9b4..7ab929e 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ha/StatsBugDUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.junit.categories.DistributedTest;
@@ -179,9 +179,9 @@ public class StatsBugDUnitTest extends DistributedTestCase
*/
public void testBug36109() throws Exception
{
- LogWriterSupport.getLogWriter().info("testBug36109 : BEGIN");
+ LogWriterUtils.getLogWriter().info("testBug36109 : BEGIN");
client1.invoke(StatsBugDUnitTest.class, "createClientCacheForInvalidates", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1), new Integer(PORT2) });
client1.invoke(StatsBugDUnitTest.class, "prepopulateClient");
primary.invoke(StatsBugDUnitTest.class, "doEntryOperations",
new Object[] { primaryPrefix });
@@ -204,7 +204,7 @@ public class StatsBugDUnitTest extends DistributedTestCase
}
client1.invoke(StatsBugDUnitTest.class, "verifyNumInvalidates");
- LogWriterSupport.getLogWriter().info("testBug36109 : END");
+ LogWriterUtils.getLogWriter().info("testBug36109 : END");
}
/**
@@ -232,7 +232,7 @@ public class StatsBugDUnitTest extends DistributedTestCase
server.setNotifyBySubscription(false);
server.setSocketBufferSize(32768);
server.start();
- LogWriterSupport.getLogWriter().info("Server started at PORT = " + port);
+ LogWriterUtils.getLogWriter().info("Server started at PORT = " + port);
return new Integer(port);
}
@@ -257,7 +257,7 @@ public class StatsBugDUnitTest extends DistributedTestCase
RegionAttributes attrs = factory.create();
Region region = cache.createRegion(REGION_NAME, attrs);
region.registerInterest("ALL_KEYS");
- LogWriterSupport.getLogWriter().info("Client cache created");
+ LogWriterUtils.getLogWriter().info("Client cache created");
}
/**
@@ -281,7 +281,7 @@ public class StatsBugDUnitTest extends DistributedTestCase
RegionAttributes attrs = factory.create();
Region region = cache.createRegion(REGION_NAME, attrs);
region.registerInterest("ALL_KEYS", false, false);
- LogWriterSupport.getLogWriter().info("Client cache created");
+ LogWriterUtils.getLogWriter().info("Client cache created");
}
/**
@@ -292,11 +292,11 @@ public class StatsBugDUnitTest extends DistributedTestCase
public static void verifyNumInvalidates()
{
long invalidatesRecordedByStats = pool.getInvalidateCount();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"invalidatesRecordedByStats = " + invalidatesRecordedByStats);
int expectedInvalidates = TOTAL_SERVERS * PUTS_PER_SERVER;
- LogWriterSupport.getLogWriter().info("expectedInvalidates = " + expectedInvalidates);
+ LogWriterUtils.getLogWriter().info("expectedInvalidates = " + expectedInvalidates);
if (invalidatesRecordedByStats != expectedInvalidates) {
fail("Invalidates received by client(" + invalidatesRecordedByStats
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java
index 89bc65f..934772c 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/locks/TXLockServiceDUnitTest.java
@@ -38,9 +38,9 @@ import com.gemstone.gemfire.internal.cache.TXRegionLockRequestImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
/**
* This class tests distributed ownership via the DistributedLockService api.
@@ -126,7 +126,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
}
public void disable_testTXRecoverGrantorMessageProcessor() throws Exception {
- LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor]");
+ LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor]");
TXLockService.createDTLS();
checkDLockRecoverGrantorMessageProcessor();
@@ -176,7 +176,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
dtls.release(txLockId);
// check results to verify no locks were provided in reply
- Threads.join(thread, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(thread, 30 * 1000);
assertEquals("testTXRecoverGrantor_replyCode_PASS is false", true,
testTXRecoverGrantor_replyCode_PASS);
assertEquals("testTXRecoverGrantor_heldLocks_PASS is false", true,
@@ -185,7 +185,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
protected static volatile TXLockId testTXLock_TXLockId;
public void testTXLock() {
- LogWriterSupport.getLogWriter().info("[testTXLock]");
+ LogWriterUtils.getLogWriter().info("[testTXLock]");
final int grantorVM = 0;
final int clientA = 1;
final int clientB = 2;
@@ -203,7 +203,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
));
// create grantor
- LogWriterSupport.getLogWriter().info("[testTXLock] create grantor");
+ LogWriterUtils.getLogWriter().info("[testTXLock] create grantor");
Host.getHost(0).getVM(grantorVM).invoke(new SerializableRunnable() {
public void run() {
@@ -213,7 +213,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
sleep(20);
// create client and request txLock
- LogWriterSupport.getLogWriter().info("[testTXLock] create clientA and request txLock");
+ LogWriterUtils.getLogWriter().info("[testTXLock] create clientA and request txLock");
Host.getHost(0).getVM(clientA).invoke(new SerializableRunnable() {
public void run() {
@@ -231,7 +231,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
});
// create nuther client and request overlapping txLock... verify fails
- LogWriterSupport.getLogWriter().info("[testTXLock] create clientB and fail txLock");
+ LogWriterUtils.getLogWriter().info("[testTXLock] create clientB and fail txLock");
Host.getHost(0).getVM(clientB).invoke(new SerializableRunnable() {
public void run() {
@@ -261,7 +261,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
*/
// release txLock
- LogWriterSupport.getLogWriter().info("[testTXLock] clientA releases txLock");
+ LogWriterUtils.getLogWriter().info("[testTXLock] clientA releases txLock");
Host.getHost(0).getVM(clientA).invoke(
new SerializableRunnable("[testTXLock] clientA releases txLock") {
@@ -273,7 +273,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
sleep(20);
// try nuther client again and verify success
- LogWriterSupport.getLogWriter().info("[testTXLock] clientB requests txLock");
+ LogWriterUtils.getLogWriter().info("[testTXLock] clientB requests txLock");
Host.getHost(0).getVM(clientB).invoke(
new SerializableRunnable("[testTXLock] clientB requests txLock") {
@@ -285,7 +285,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
});
// release txLock
- LogWriterSupport.getLogWriter().info("[testTXLock] clientB releases txLock");
+ LogWriterUtils.getLogWriter().info("[testTXLock] clientB releases txLock");
Host.getHost(0).getVM(clientB).invoke(
new SerializableRunnable("[testTXLock] clientB releases txLock") {
@@ -298,7 +298,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
protected static volatile TXLockId testTXOriginatorRecoveryProcessor_TXLockId;
public void testTXOriginatorRecoveryProcessor() {
- LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor]");
+ LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor]");
final int originatorVM = 0;
final int grantorVM = 1;
final int particpantA = 2;
@@ -324,7 +324,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
}
// create grantor
- LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor] grantorVM becomes grantor");
+ LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor] grantorVM becomes grantor");
Host.getHost(0).getVM(grantorVM).invoke(new SerializableRunnable() {
public void run() {
@@ -341,7 +341,7 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
Boolean.TRUE, isGrantor);
// have a originatorVM get a txLock with three participants including grantor
- LogWriterSupport.getLogWriter().info("[testTXOriginatorRecoveryProcessor] originatorVM requests txLock");
+ LogWriterUtils.getLogWriter().info("[testTXOriginatorRecoveryProcessor] originatorVM requests txLock");
Host.getHost(0).getVM(originatorVM).invoke(new SerializableRunnable() {
public void run() {
@@ -425,14 +425,14 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
}
public void testDTLSIsDistributed() {
- LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed]");
+ LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed]");
// have all vms lock and hold the same LTLS lock simultaneously
final Host host = Host.getHost(0);
int vmCount = host.getVMCount();
for (int vm = 0; vm < vmCount; vm++) {
final int finalvm = vm;
- LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] testing vm " + finalvm);
+ LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] testing vm " + finalvm);
Host.getHost(0).getVM(finalvm).invoke(new SerializableRunnable() {
public void run() {
@@ -445,21 +445,21 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
TXLockServiceDUnitTest.class, "isDistributed_DTLS", new Object[] {});
assertEquals("isDistributed should be true for DTLS",
Boolean.TRUE, isDistributed);
- LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] isDistributed=" + isDistributed);
+ LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] isDistributed=" + isDistributed);
// lock a key...
Boolean gotLock = (Boolean)host.getVM(finalvm).invoke(
TXLockServiceDUnitTest.class, "lock_DTLS", new Object[] {"KEY"});
assertEquals("gotLock is false after calling lock_DTLS",
Boolean.TRUE, gotLock);
- LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] gotLock=" + gotLock);
+ LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] gotLock=" + gotLock);
// unlock it...
Boolean unlock = (Boolean)host.getVM(finalvm).invoke(
TXLockServiceDUnitTest.class, "unlock_DTLS", new Object[] {"KEY"});
assertEquals("unlock is false after calling unlock_DTLS",
Boolean.TRUE, unlock);
- LogWriterSupport.getLogWriter().info("[testDTLSIsDistributed] unlock=" + unlock);
+ LogWriterUtils.getLogWriter().info("[testDTLSIsDistributed] unlock=" + unlock);
}
}
@@ -670,14 +670,14 @@ public class TXLockServiceDUnitTest extends DistributedTestCase {
Host host = Host.getHost(0);
int vmCount = host.getVMCount();
for (int i=0; i<vmCount; i++) {
- LogWriterSupport.getLogWriter().info("Invoking " + methodName + "on VM#" + i);
+ LogWriterUtils.getLogWriter().info("Invoking " + methodName + "on VM#" + i);
host.getVM(i).invoke(this.getClass(), methodName, args);
}
}
public Properties getDistributedSystemProperties() {
Properties props = super.getDistributedSystemProperties();
- props.setProperty("log-level", LogWriterSupport.getDUnitLogLevel());
+ props.setProperty("log-level", LogWriterUtils.getDUnitLogLevel());
return props;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java
index 80f5df5..0371df7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug39356DUnitTest.java
@@ -111,7 +111,7 @@ public class Bug39356DUnitTest extends CacheTestCase {
SerializableRunnable verifyBuckets = new SerializableRunnable("Verify buckets") {
public void run() {
- LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ LogWriter log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
Cache cache = getCache();
PartitionedRegion r = (PartitionedRegion) cache.getRegion(REGION_NAME);
for(int i = 0; i < r.getAttributes().getPartitionAttributes().getTotalNumBuckets(); i++) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java
index 379ab04..d7e6a16 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug43684DUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.RegionEntry;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.VM;
@@ -237,7 +237,7 @@ public class Bug43684DUnitTest extends DistributedTestCase {
public static Integer createServerCache(Boolean isReplicated, Boolean isPrimaryEmpty) throws Exception {
DistributedTestCase.disconnectFromDS();
Properties props = new Properties();
- props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
// props.setProperty("log-file", "server_" + OSProcess.getId() + ".log");
// props.setProperty("log-level", "fine");
props.setProperty("statistic-archive-file", "server_" + OSProcess.getId()
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java
index 0b81cec..0f9a3b7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/Bug51400DUnitTest.java
@@ -37,9 +37,9 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxyStats;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -94,7 +94,7 @@ public class Bug51400DUnitTest extends DistributedTestCase {
public static Integer createServerCache(Integer mcastPort,
Integer maxMessageCount) throws Exception {
Properties props = new Properties();
- props.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ props.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
// props.setProperty("log-file", "server_" + OSProcess.getId() + ".log");
// props.setProperty("log-level", "fine");
// props.setProperty("statistic-archive-file", "server_" + OSProcess.getId()
@@ -177,7 +177,7 @@ public class Bug51400DUnitTest extends DistributedTestCase {
"createServerCache", new Object[] { maxQSize });
client1.invoke(Bug51400DUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer[]{port1}, ackInterval});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer[]{port1}, ackInterval});
// Do puts from server as well as from client on the same key.
AsyncInvocation ai1 = server0.invokeAsync(Bug51400DUnitTest.class,
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java
index 56ced6c..d74a94b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistPRKRFDUnitTest.java
@@ -29,8 +29,7 @@ import com.gemstone.gemfire.internal.cache.DiskStoreImpl;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -102,7 +101,7 @@ public class PersistPRKRFDUnitTest extends PersistentPartitionedRegionTestBase {
}
}
});
- Threads.join(async1, MAX_WAIT, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, MAX_WAIT);
closeCache(vm0);
// update
@@ -145,7 +144,7 @@ public class PersistPRKRFDUnitTest extends PersistentPartitionedRegionTestBase {
}
}
});
- Threads.join(async1, MAX_WAIT, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, MAX_WAIT);
closeCache(vm0);
// destroy
@@ -188,7 +187,7 @@ public class PersistPRKRFDUnitTest extends PersistentPartitionedRegionTestBase {
}
}
});
- Threads.join(async1, MAX_WAIT, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(async1, MAX_WAIT);
checkData(vm0, 0, 10, "a");
checkData(vm0, 10, 11, null);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java
index 4364b0f..abf546d 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionDUnitTest.java
@@ -78,8 +78,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.RMIException;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
@@ -1387,7 +1387,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR
Cache cache = getCache();
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(host), serverPort);
+ pf.addServer(NetworkUtils.getServerHostName(host), serverPort);
pf.setSubscriptionEnabled(true);
pf.create("pool");
AttributesFactory af = new AttributesFactory();
@@ -1450,7 +1450,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR
DistributedTestCase.disconnectFromDS();
await().atMost(30, SECONDS).until(() -> {return (cache == null || cache.isClosed());});
- LogWriterSupport.getLogWriter().info("Cache is confirmed closed");
+ LogWriterUtils.getLogWriter().info("Cache is confirmed closed");
}
}
});
@@ -1661,7 +1661,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR
}
});
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPR(vm0, 1, 0, 1);
//Make sure we create a bucket
@@ -1671,7 +1671,7 @@ public class PersistentPartitionedRegionDUnitTest extends PersistentPartitionedR
IgnoredException ex = IgnoredException.addIgnoredException("PartitionOfflineException");
try {
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPR(vm1, 1, 0, 1);
//Make sure get a partition offline exception
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java
index 17cd552..3b89271 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionTestBase.java
@@ -59,7 +59,7 @@ import com.gemstone.gemfire.internal.cache.persistence.PersistentMemberID;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -203,7 +203,7 @@ public abstract class PersistentPartitionedRegionTestBase extends CacheTestCase
protected void createData(VM vm, final int startKey, final int endKey,
final String value) {
- LogWriterSupport.getLogWriter().info("createData invoked. PR_REGION_NAME is " + PR_REGION_NAME);
+ LogWriterUtils.getLogWriter().info("createData invoked. PR_REGION_NAME is " + PR_REGION_NAME);
createData(vm, startKey, endKey,value, PR_REGION_NAME);
}
@@ -213,7 +213,7 @@ public abstract class PersistentPartitionedRegionTestBase extends CacheTestCase
public void run() {
Cache cache = getCache();
- LogWriterSupport.getLogWriter().info("creating data in " + regionName);
+ LogWriterUtils.getLogWriter().info("creating data in " + regionName);
Region region = cache.getRegion(regionName);
for(int i =startKey; i < endKey; i++) {
@@ -790,7 +790,7 @@ public abstract class PersistentPartitionedRegionTestBase extends CacheTestCase
BufferedReader br = new BufferedReader(new InputStreamReader(is));
String line;
while((line = br.readLine()) != null) {
- LogWriterSupport.getLogWriter().fine("OUTPUT:" + line);
+ LogWriterUtils.getLogWriter().fine("OUTPUT:" + line);
//TODO validate output
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java
index e81ba89..90118d1 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/PersistentPartitionedRegionWithTransactionDUnitTest.java
@@ -24,7 +24,7 @@ import com.gemstone.gemfire.internal.cache.TXManagerImpl;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -131,12 +131,12 @@ public class PersistentPartitionedRegionWithTransactionDUnitTest extends Persist
@Override
protected void createData(VM vm, final int startKey, final int endKey, final String value,
final String regionName) {
- LogWriterSupport.getLogWriter().info("creating runnable to create data for region " + regionName);
+ LogWriterUtils.getLogWriter().info("creating runnable to create data for region " + regionName);
SerializableRunnable createData = new SerializableRunnable() {
public void run() {
Cache cache = getCache();
- LogWriterSupport.getLogWriter().info("getting region " + regionName);
+ LogWriterUtils.getLogWriter().info("getting region " + regionName);
Region region = cache.getRegion(regionName);
for(int i =startKey; i < endKey; i++) {
@@ -168,7 +168,7 @@ public class PersistentPartitionedRegionWithTransactionDUnitTest extends Persist
public void run() {
Cache cache = getCache();
- LogWriterSupport.getLogWriter().info("checking data in " + regionName);
+ LogWriterUtils.getLogWriter().info("checking data in " + regionName);
Region region = cache.getRegion(regionName);
for(int i =startKey; i < endKey; i++) {
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java
index bf6cad8..1b00b8c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/partitioned/fixed/FixedPartitioningTestBase.java
@@ -73,7 +73,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
import com.gemstone.gemfire.internal.cache.tier.sockets.Message;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -463,7 +463,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
+ LogWriterUtils.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
}
}
@@ -487,7 +487,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
}
}
@@ -514,7 +514,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Shipment :- { " + shipmentId + " : " + shipment + " }");
}
}
@@ -538,7 +538,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
+ LogWriterUtils.getLogWriter().info("Customer :- { " + custid + " : " + customer + " }");
}
}
@@ -562,7 +562,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
}
}
@@ -589,7 +589,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Shipment :- { " + shipmentId + " : " + shipment + " }");
}
}
@@ -614,7 +614,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("Customer :- { " + custid + " : " + customer + " }");
}
}
@@ -641,7 +641,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
}
}
@@ -670,7 +670,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Shipment :- { " + shipmentId + " : " + shipment + " }");
}
}
@@ -696,7 +696,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putCustomerPartitionedRegion : failed while doing put operation in CustomerPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter()
+ LogWriterUtils.getLogWriter()
.info("Customer :- { " + custid + " : " + customer + " }");
}
}
@@ -723,7 +723,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putOrderPartitionedRegion : failed while doing put operation in OrderPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
+ LogWriterUtils.getLogWriter().info("Order :- { " + orderId + " : " + order + " }");
}
}
}
@@ -752,7 +752,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
"putShipmentPartitionedRegion : failed while doing put operation in ShipmentPartitionedRegion ",
e);
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Shipment :- { " + shipmentId + " : " + shipment + " }");
}
}
@@ -853,7 +853,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
// assertNotNull(orderPartitionedregion.get(orderId));
if (custId.equals(orderId.getCustId())) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
orderId + "belongs to node " + idmForCustomer + " "
+ idmForOrder);
assertEquals(idmForCustomer, idmForOrder);
@@ -865,7 +865,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
ShipmentId shipmentId = (ShipmentId)shipmentIterator.next();
// assertNotNull(shipmentPartitionedregion.get(shipmentId));
if (orderId.equals(shipmentId.getOrderId())) {
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
shipmentId + "belongs to node " + idmForOrder + " "
+ idmForShipment);
}
@@ -1089,15 +1089,15 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
Integer primaryBuckets) {
HashMap localBucket2RegionMap = (HashMap)region_FPR.getDataStore()
.getSizeLocally();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + region_FPR + " in this VM :- "
+ localBucket2RegionMap.size() + "List of buckets : "
+ localBucket2RegionMap.keySet());
assertEquals(numBuckets.intValue(), localBucket2RegionMap.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of primary buckets the " + region_FPR + " in this VM :- "
+ region_FPR.getDataStore().getNumberOfPrimaryBucketsManaged());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Lit of Primaries in this VM :- "
+ region_FPR.getDataStore().getAllLocalPrimaryBucketIds());
@@ -1109,15 +1109,15 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
Integer numBuckets, Integer primaryBuckets) {
HashMap localBucket2RegionMap = (HashMap)region_FPR.getDataStore()
.getSizeLocally();
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of the " + region_FPR + " in this VM :- "
+ localBucket2RegionMap.size() + "List of buckets : "
+ localBucket2RegionMap.keySet());
assertEquals(numBuckets.intValue(), localBucket2RegionMap.size());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of primary buckets the " + region_FPR + " in this VM :- "
+ region_FPR.getDataStore().getNumberOfPrimaryBucketsManaged());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Lit of Primaries in this VM :- "
+ region_FPR.getDataStore().getAllLocalPrimaryBucketIds());
@@ -1307,7 +1307,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
}
};
Wait.waitForCriterion(wc, 20000, 500, false);
- LogWriterSupport.getLogWriter().info("end of beforeCalculatingStartingBucketId");
+ LogWriterUtils.getLogWriter().info("end of beforeCalculatingStartingBucketId");
}
});
}
@@ -1364,7 +1364,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().error(t);
+ LogWriterUtils.getLogWriter().error(t);
}
}
}
@@ -1378,7 +1378,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
throw e;
}
catch (Throwable t) {
- LogWriterSupport.getLogWriter().error("Error in closing the cache ", t);
+ LogWriterUtils.getLogWriter().error("Error in closing the cache ", t);
}
}
@@ -1386,7 +1386,7 @@ public class FixedPartitioningTestBase extends DistributedTestCase {
try {
cleanDiskDirs();
} catch(IOException e) {
- LogWriterSupport.getLogWriter().error("Error cleaning disk dirs", e);
+ LogWriterUtils.getLogWriter().error("Error cleaning disk dirs", e);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java
index 5f62346..ecaa1ba 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRVVRecoveryDUnitTest.java
@@ -68,7 +68,7 @@ import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -415,7 +415,7 @@ public class PersistentRVVRecoveryDUnitTest extends PersistentReplicatedTestBase
Region.Entry entry = ((PartitionedRegion)region).getEntry("testKey", true /*Entry is destroyed*/);
RegionEntry re = ((EntrySnapshot)entry).getRegionEntry();
- LogWriterSupport.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region));
+ LogWriterUtils.getLogWriter().fine("RegionEntry for testKey: " + re.getKey() + " " + re.getValueInVM((LocalRegion) region));
assertTrue(re.getValueInVM((LocalRegion) region) instanceof Tombstone);
VersionTag tag = re.getVersionStamp().asVersionTag();
@@ -839,7 +839,7 @@ public class PersistentRVVRecoveryDUnitTest extends PersistentReplicatedTestBase
for(int i = 0; i < 3; i++) {
NonTXEntry entry = (NonTXEntry) recoveredRegion.getEntry("key" + i);
tagsFromKrf[i] = entry.getRegionEntry().getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("krfTag[" + i + "]="+ tagsFromKrf[i] + ",value=" + entry.getValue());
+ LogWriterUtils.getLogWriter().info("krfTag[" + i + "]="+ tagsFromKrf[i] + ",value=" + entry.getValue());
}
closeCache();
@@ -854,7 +854,7 @@ public class PersistentRVVRecoveryDUnitTest extends PersistentReplicatedTestBase
for(int i = 0; i < 3; i++) {
NonTXEntry entry = (NonTXEntry) recoveredRegion.getEntry("key" + i);
tagsFromCrf[i] = entry.getRegionEntry().getVersionStamp().asVersionTag();
- LogWriterSupport.getLogWriter().info("crfTag[" + i + "]="+ tagsFromCrf[i] + ",value=" + entry.getValue());
+ LogWriterUtils.getLogWriter().info("crfTag[" + i + "]="+ tagsFromCrf[i] + ",value=" + entry.getValue());
}
//Make sure the version tags from the krf and the crf match.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java
index 0a7e64e..f20762c 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/persistence/PersistentRecoveryOrderDUnitTest.java
@@ -73,7 +73,7 @@ import com.gemstone.gemfire.internal.cache.versions.RegionVersionVector;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -111,31 +111,31 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
putAnEntry(vm0);
- LogWriterSupport.getLogWriter().info("closing region in vm0");
+ LogWriterUtils.getLogWriter().info("closing region in vm0");
closeRegion(vm0);
updateTheEntry(vm1);
- LogWriterSupport.getLogWriter().info("closing region in vm1");
+ LogWriterUtils.getLogWriter().info("closing region in vm1");
closeRegion(vm1);
//This ought to wait for VM1 to come back
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation future = createPersistentRegionAsync(vm0);
waitForBlockedInitialization(vm0);
assertTrue(future.isAlive());
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
future.join(MAX_WAIT);
@@ -164,9 +164,9 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
putAnEntry(vm0);
@@ -181,17 +181,17 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
}
});
- LogWriterSupport.getLogWriter().info("closing region in vm0");
+ LogWriterUtils.getLogWriter().info("closing region in vm0");
closeRegion(vm0);
updateTheEntry(vm1);
- LogWriterSupport.getLogWriter().info("closing region in vm1");
+ LogWriterUtils.getLogWriter().info("closing region in vm1");
closeCache(vm1);
//This ought to wait for VM1 to come back
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation future = createPersistentRegionAsync(vm0);
waitForBlockedInitialization(vm0);
@@ -209,7 +209,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
adminDS = AdminDistributedSystemFactory.getDistributedSystem(config);
adminDS.connect();
Set<PersistentID> missingIds = adminDS.getMissingPersistentMembers();
- LogWriterSupport.getLogWriter().info("waiting members=" + missingIds);
+ LogWriterUtils.getLogWriter().info("waiting members=" + missingIds);
assertEquals(1, missingIds.size());
PersistentID missingMember = missingIds.iterator().next();
adminDS.revokePersistentMember(
@@ -252,7 +252,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
//Now, we should not be able to create a region
//in vm1, because the this member was revoked
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
IgnoredException e = IgnoredException.addIgnoredException(RevokedPersistentDataException.class.getSimpleName(), vm1);
try {
createPersistentRegion(vm1);
@@ -299,9 +299,9 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
VM vm1 = host.getVM(1);
VM vm2 = host.getVM(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
putAnEntry(vm0);
@@ -316,12 +316,12 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
}
});
- LogWriterSupport.getLogWriter().info("closing region in vm0");
+ LogWriterUtils.getLogWriter().info("closing region in vm0");
closeRegion(vm0);
updateTheEntry(vm1);
- LogWriterSupport.getLogWriter().info("closing region in vm1");
+ LogWriterUtils.getLogWriter().info("closing region in vm1");
closeRegion(vm1);
final File dirToRevoke = getDiskDirForVM(vm1);
@@ -347,7 +347,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
});
//This shouldn't wait, because we revoked the member
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
checkForRecoveryStat(vm0, true);
@@ -366,7 +366,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
//Now, we should not be able to create a region
//in vm1, because the this member was revoked
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
IgnoredException e = IgnoredException.addIgnoredException(RevokedPersistentDataException.class.getSimpleName(), vm1);
try {
createPersistentRegion(vm1);
@@ -392,9 +392,9 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
VM vm2 = host.getVM(2);
VM vm3 = host.getVM(3);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
createPersistentRegion(vm2);
@@ -410,28 +410,28 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
}
});
- LogWriterSupport.getLogWriter().info("closing region in vm0");
+ LogWriterUtils.getLogWriter().info("closing region in vm0");
closeRegion(vm0);
updateTheEntry(vm1);
- LogWriterSupport.getLogWriter().info("closing region in vm1");
+ LogWriterUtils.getLogWriter().info("closing region in vm1");
closeRegion(vm1);
updateTheEntry(vm2, "D");
- LogWriterSupport.getLogWriter().info("closing region in vm2");
+ LogWriterUtils.getLogWriter().info("closing region in vm2");
closeRegion(vm2);
//These ought to wait for VM2 to come back
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation future0 = createPersistentRegionAsync(vm0);
waitForBlockedInitialization(vm0);
assertTrue(future0.isAlive());
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
final AsyncInvocation future1 = createPersistentRegionAsync(vm1);
waitForBlockedInitialization(vm1);
assertTrue(future1.isAlive());
@@ -447,7 +447,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
adminDS = AdminDistributedSystemFactory.getDistributedSystem(config);
adminDS.connect();
Set<PersistentID> missingIds = adminDS.getMissingPersistentMembers();
- LogWriterSupport.getLogWriter().info("waiting members=" + missingIds);
+ LogWriterUtils.getLogWriter().info("waiting members=" + missingIds);
assertEquals(1, missingIds.size());
} catch (AdminException e) {
throw new RuntimeException(e);
@@ -582,12 +582,12 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
restoreBackup(vm1);
//This ought to wait for VM1 to come back
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation future = createPersistentRegionAsync(vm0);
waitForBlockedInitialization(vm0);
assertTrue(future.isAlive());
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
future.join(MAX_WAIT);
@@ -1077,24 +1077,24 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
putAnEntry(vm0);
- LogWriterSupport.getLogWriter().info("closing region in vm0");
+ LogWriterUtils.getLogWriter().info("closing region in vm0");
closeRegion(vm0);
updateTheEntry(vm1);
- LogWriterSupport.getLogWriter().info("closing region in vm1");
+ LogWriterUtils.getLogWriter().info("closing region in vm1");
closeRegion(vm1);
//This ought to wait for VM1 to come back
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
AsyncInvocation future = createPersistentRegionAsync(vm0);
waitForBlockedInitialization(vm0);
@@ -1180,7 +1180,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
final VM vm1 = host.getVM(1);
final VM vm2 = host.getVM(2);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
//Add a hook which will disconnect from the distributed
@@ -1386,10 +1386,10 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
Cache cache = getCache();
Region region = cache.getRegion(REGION_NAME);
if (region == null) {
- LogWriterSupport.getLogWriter().severe("removing listener for PersistentRecoveryOrderDUnitTest because region was not found: " + REGION_NAME);
+ LogWriterUtils.getLogWriter().severe("removing listener for PersistentRecoveryOrderDUnitTest because region was not found: " + REGION_NAME);
Object old = DistributionMessageObserver.setInstance(null);
if (old != this) {
- LogWriterSupport.getLogWriter().severe("removed listener was not the invoked listener", new Exception("stack trace"));
+ LogWriterUtils.getLogWriter().severe("removed listener was not the invoked listener", new Exception("stack trace"));
}
return;
}
@@ -1673,17 +1673,17 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
createPersistentRegion(vm0);
putAnEntry(vm0);
- LogWriterSupport.getLogWriter().info("closing region in vm0");
+ LogWriterUtils.getLogWriter().info("closing region in vm0");
closeCache(vm0);
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
createPersistentRegion(vm1);
putAnEntry(vm1);
- LogWriterSupport.getLogWriter().info("Creating region in VM0");
+ LogWriterUtils.getLogWriter().info("Creating region in VM0");
IgnoredException ex = IgnoredException.addIgnoredException("ConflictingPersistentDataException", vm0);
try {
//this should cause a conflict
@@ -1697,7 +1697,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
ex.remove();
}
- LogWriterSupport.getLogWriter().info("closing region in vm1");
+ LogWriterUtils.getLogWriter().info("closing region in vm1");
closeCache(vm1);
//This should work now
@@ -1707,7 +1707,7 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
ex = IgnoredException.addIgnoredException("ConflictingPersistentDataException", vm1);
//Now make sure vm1 gets a conflict
- LogWriterSupport.getLogWriter().info("Creating region in VM1");
+ LogWriterUtils.getLogWriter().info("Creating region in VM1");
try {
//this should cause a conflict
createPersistentRegion(vm1);
@@ -1780,11 +1780,11 @@ public class PersistentRecoveryOrderDUnitTest extends PersistentReplicatedTestBa
@Override
public Properties getDistributedSystemProperties() {
- LogWriterSupport.getLogWriter().info("Looking for ack-wait-threshold");
+ LogWriterUtils.getLogWriter().info("Looking for ack-wait-threshold");
String s = System.getProperty("gemfire.ack-wait-threshold");
if (s != null) {
SAVED_ACK_WAIT_THRESHOLD = s;
- LogWriterSupport.getLogWriter().info("removing system property gemfire.ack-wait-threshold");
+ LogWriterUtils.getLogWriter().info("removing system property gemfire.ack-wait-threshold");
System.getProperties().remove("gemfire.ack-wait-threshold");
}
Properties props = super.getDistributedSystemProperties();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java
index e9fb1ed..a3976a7 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/BackwardCompatibilityHigherVersionClientDUnitTest.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.internal.cache.tier.ConnectionProxy;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.cache.client.internal.ConnectionFactoryImpl;
@@ -170,7 +170,7 @@ public class BackwardCompatibilityHigherVersionClientDUnitTest extends
"setHandshakeVersionForTesting");
client1.invoke(BackwardCompatibilityHigherVersionClientDUnitTest.class,
"createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), port1 });
+ NetworkUtils.getServerHostName(server1.getHost()), port1 });
client1.invoke(BackwardCompatibilityHigherVersionClientDUnitTest.class,
"verifyConnectionToServerFailed");
}
[23/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug39079DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug39079DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug39079DUnitTest.java
index 59aeac5..532b806 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug39079DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug39079DUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.persistence.UninterruptibleFileChannel;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -306,7 +306,7 @@ public class Bug39079DUnitTest extends CacheTestCase {
Integer port = (Integer)vm0.invoke(Bug39079DUnitTest.class, "createServerCache");
//create cache client
vm1.invoke(Bug39079DUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(vm0.getHost()), port});
+ new Object[] { NetworkUtils.getServerHostName(vm0.getHost()), port});
// validate
vm0.invoke(Bug39079DUnitTest.class, "validateRuningBridgeServerList");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41091DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41091DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41091DUnitTest.java
index 00fa760..a03f035 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41091DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41091DUnitTest.java
@@ -36,8 +36,8 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.cache.InitialImageOperation.RequestImageMessage;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -98,7 +98,7 @@ public class Bug41091DUnitTest extends CacheTestCase {
Properties props = new Properties();
props.setProperty(DistributionConfig.ENABLE_NETWORK_PARTITION_DETECTION_NAME, "true");
- props.setProperty(DistributionConfig.LOCATORS_NAME, NetworkSupport.getServerHostName(host) + "[" + locatorPort + "]");
+ props.setProperty(DistributionConfig.LOCATORS_NAME, NetworkUtils.getServerHostName(host) + "[" + locatorPort + "]");
getSystem(props);
@@ -118,7 +118,7 @@ public class Bug41091DUnitTest extends CacheTestCase {
public void run() {
Properties props = new Properties();
props.setProperty(DistributionConfig.ENABLE_NETWORK_PARTITION_DETECTION_NAME, "true");
- props.setProperty(DistributionConfig.LOCATORS_NAME, NetworkSupport.getServerHostName(host) + "[" + locatorPort + "]");
+ props.setProperty(DistributionConfig.LOCATORS_NAME, NetworkUtils.getServerHostName(host) + "[" + locatorPort + "]");
getSystem(props);
Cache cache = getCache();
AttributesFactory af = new AttributesFactory();
@@ -152,7 +152,7 @@ public class Bug41091DUnitTest extends CacheTestCase {
disconnectFromDS();
Properties props = new Properties();
props.setProperty(DistributionConfig.MCAST_PORT_NAME, String.valueOf(0));
- props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel());
+ props.setProperty(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel());
props.setProperty(DistributionConfig.ENABLE_NETWORK_PARTITION_DETECTION_NAME, "true");
props.setProperty(DistributionConfig.ENABLE_CLUSTER_CONFIGURATION_NAME, "false");
try {
@@ -160,7 +160,7 @@ public class Bug41091DUnitTest extends CacheTestCase {
+ ".log");
InetAddress bindAddr = null;
try {
- bindAddr = InetAddress.getByName(NetworkSupport.getServerHostName(vm.getHost()));
+ bindAddr = InetAddress.getByName(NetworkUtils.getServerHostName(vm.getHost()));
} catch (UnknownHostException uhe) {
Assert.fail("While resolving bind address ", uhe);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41957DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41957DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41957DUnitTest.java
index d3fc346..a16a9c3 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41957DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug41957DUnitTest.java
@@ -35,9 +35,9 @@ import com.gemstone.gemfire.cache30.ClientServerTestCase;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -66,7 +66,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final VM client = host.getVM(1);
final String regionName = getUniqueName();
final int serverPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
createBridgeServer(server, regionName, serverPort, false);
@@ -91,7 +91,7 @@ import com.gemstone.gemfire.test.dunit.VM;
public void run2() throws CacheException {
// Create DS
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
getSystem(config);
// Create Region
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug45164DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug45164DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug45164DUnitTest.java
index c8d1ffc..371df3f 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug45164DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug45164DUnitTest.java
@@ -25,7 +25,7 @@ import com.gemstone.gemfire.cache.RegionShortcut;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
public class Bug45164DUnitTest extends CacheTestCase {
@@ -79,7 +79,7 @@ public class Bug45164DUnitTest extends CacheTestCase {
Cache cache = getCache(new CacheFactory());
Region<Integer, Object> region = cache.<Integer, Object>createRegionFactory(RegionShortcut.PARTITION).create("test");
if (region == null) {
- LogWriterSupport.getLogWriter().error("oops!");
+ LogWriterUtils.getLogWriter().error("oops!");
}
}
};
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug47667DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug47667DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug47667DUnitTest.java
index 24682c1..1c0abe6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug47667DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/Bug47667DUnitTest.java
@@ -25,7 +25,7 @@ import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.cache.client.internal.LocatorTestBase;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -57,7 +57,7 @@ public class Bug47667DUnitTest extends LocatorTestBase {
VM client = host.getVM(3);
final int locatorPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String locatorHost = NetworkSupport.getServerHostName(host);
+ final String locatorHost = NetworkUtils.getServerHostName(host);
startLocatorInVM(locator, locatorPort, "");
String locString = getLocatorString(host, locatorPort);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearDAckDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearDAckDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearDAckDUnitTest.java
index 5b9feed..321228b 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearDAckDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearDAckDUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.internal.cache.versions.VersionSource;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -73,7 +73,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
VM vm1 = host.getVM(1);
vm0ID = (DistributedMember)vm0.invoke(ClearDAckDUnitTest.class, "createCacheVM0");
vm1ID = (DistributedMember)vm1.invoke(ClearDAckDUnitTest.class, "createCacheVM1");
- LogWriterSupport.getLogWriter().info("Cache created in successfully");
+ LogWriterUtils.getLogWriter().info("Cache created in successfully");
}
public void preTearDown(){
@@ -104,7 +104,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
try{
// props.setProperty("mcast-port", "1234");
// ds = DistributedSystem.connect(props);
- LogWriterSupport.getLogWriter().info("I am vm0");
+ LogWriterUtils.getLogWriter().info("I am vm0");
ds = (new ClearDAckDUnitTest("temp")).getSystem(props);
cache = CacheFactory.create(ds);
@@ -118,7 +118,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
RegionAttributes attr = factory.create();
region = cache.createRegion("map", attr);
- LogWriterSupport.getLogWriter().info("vm0 map region: " + region);
+ LogWriterUtils.getLogWriter().info("vm0 map region: " + region);
paperWork = cache.createRegion("paperWork", attr);
return cache.getDistributedSystem().getDistributedMember();
} catch (CacheException ex){
@@ -129,7 +129,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
try{
// props.setProperty("mcast-port", "1234");
// ds = DistributedSystem.connect(props);
- LogWriterSupport.getLogWriter().info("I am vm1");
+ LogWriterUtils.getLogWriter().info("I am vm1");
ds = (new ClearDAckDUnitTest("temp")).getSystem(props);
//DistributedSystem.setThreadsSocketPolicy(false);
CacheObserverImpl observer = new CacheObserverImpl();
@@ -145,7 +145,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
RegionAttributes attr = factory.create();
region = cache.createRegion("map", attr);
- LogWriterSupport.getLogWriter().info("vm1 map region: " + region);
+ LogWriterUtils.getLogWriter().info("vm1 map region: " + region);
paperWork = cache.createRegion("paperWork", attr);
return cache.getDistributedSystem().getDistributedMember();
@@ -158,7 +158,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
try{
// props.setProperty("mcast-port", "1234");
// ds = DistributedSystem.connect(props);
- LogWriterSupport.getLogWriter().info("I am vm2");
+ LogWriterUtils.getLogWriter().info("I am vm2");
ds = (new ClearDAckDUnitTest("temp")).getSystem(props);
//DistributedSystem.setThreadsSocketPolicy(false);
CacheObserverImpl observer = new CacheObserverImpl();
@@ -174,7 +174,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
RegionAttributes attr = factory.create();
region = cache.createRegion("map", attr);
- LogWriterSupport.getLogWriter().info("vm2 map region: " + region);
+ LogWriterUtils.getLogWriter().info("vm2 map region: " + region);
paperWork = cache.createRegion("paperWork", attr);
region.put("vm2Key", "vm2Value");
@@ -209,14 +209,14 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
vm0.invoke(ClearDAckDUnitTest.class, "putMethod", objArr);
}
- LogWriterSupport.getLogWriter().info("Did all puts successfully");
+ LogWriterUtils.getLogWriter().info("Did all puts successfully");
long regionVersion = (Long)vm1.invoke(ClearDAckDUnitTest.class, "getRegionVersion", new Object[]{vm0ID});
vm0.invoke(ClearDAckDUnitTest.class,"clearMethod");
boolean flag = vm1.invokeBoolean(ClearDAckDUnitTest.class,"getVM1Flag");
- LogWriterSupport.getLogWriter().fine("Flag in VM1="+ flag);
+ LogWriterUtils.getLogWriter().fine("Flag in VM1="+ flag);
assertTrue(flag);
@@ -228,7 +228,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
vm2.invoke(ClearDAckDUnitTest.class, "createCacheVM2AndLocalClear");
flag = vm1.invokeBoolean(ClearDAckDUnitTest.class,"getVM1Flag");
- LogWriterSupport.getLogWriter().fine("Flag in VM1="+ flag);
+ LogWriterUtils.getLogWriter().fine("Flag in VM1="+ flag);
assertFalse(flag);
}//end of test case
@@ -257,7 +257,7 @@ public class ClearDAckDUnitTest extends DistributedTestCase {
long end = System.currentTimeMillis();
long diff = end - start;
- LogWriterSupport.getLogWriter().info("Clear Thread proceeded before receiving the ack message in (milli seconds): "+diff);
+ LogWriterUtils.getLogWriter().info("Clear Thread proceeded before receiving the ack message in (milli seconds): "+diff);
}catch (Exception e){
e.printStackTrace();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearGlobalDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearGlobalDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearGlobalDUnitTest.java
index 6f384a2..bde9540 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearGlobalDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClearGlobalDUnitTest.java
@@ -35,8 +35,8 @@ import com.gemstone.gemfire.internal.cache.CacheObserverAdapter;
import com.gemstone.gemfire.internal.cache.CacheObserverHolder;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import java.util.Properties;
@@ -82,7 +82,7 @@ public class ClearGlobalDUnitTest extends DistributedTestCase
server1 = host.getVM(0);
server1.invoke(ClearGlobalDUnitTest.class, "createCacheServer1");
createCacheServer2();
- LogWriterSupport.getLogWriter().fine("Cache created in successfully");
+ LogWriterUtils.getLogWriter().fine("Cache created in successfully");
}
public void preTearDown()
@@ -174,7 +174,7 @@ public class ClearGlobalDUnitTest extends DistributedTestCase
throw new Exception("Test Failed: " + exceptionMsg);
}
else{
- LogWriterSupport.getLogWriter().info("Test Passed Successfully ");
+ LogWriterUtils.getLogWriter().info("Test Passed Successfully ");
}
}
@@ -184,7 +184,7 @@ public class ClearGlobalDUnitTest extends DistributedTestCase
{
Thread th = new PutThread();
th.start();
- Threads.join(th, 5 * 60 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(th, 5 * 60 * 1000);
synchronized (lock) {
testComplete = true;
lock.notify();
@@ -204,7 +204,7 @@ public class ClearGlobalDUnitTest extends DistributedTestCase
catch (TimeoutException ex) {
//pass
testFailed = false;
- LogWriterSupport.getLogWriter().info("Expected TimeoutException in thread ");
+ LogWriterUtils.getLogWriter().info("Expected TimeoutException in thread ");
}
catch (Exception ex) {
exceptionMsg.append(" Exception occurred while region.put(key,value)");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerGetAllDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerGetAllDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerGetAllDUnitTest.java
index 3d001e6..f7f633c 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerGetAllDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerGetAllDUnitTest.java
@@ -42,10 +42,10 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.offheap.SimpleMemoryAllocatorImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedTestSupport;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -72,7 +72,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final String regionName = getUniqueName();
final int mcastPort = 0; /* loner is ok for this test*/ //AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
final int serverPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
createBridgeServer(server, regionName, serverPort, false, false);
@@ -123,7 +123,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final VM client = host.getVM(1);
final String regionName = getUniqueName();
final int serverPort = AvailablePortHelper.getRandomAvailableTCPPort();
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
createBridgeServer(server, regionName, serverPort, false, false, true/*offheap*/);
@@ -175,7 +175,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final VM client = host.getVM(1);
final String regionName = getUniqueName();
final int serverPort = AvailablePortHelper.getRandomAvailableTCPPort();
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
createBridgeServer(server, regionName, serverPort, false, false, true/*offheap*/);
@@ -287,7 +287,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final String regionName = getUniqueName();
final int mcastPort = 0; /* loner is ok for this test*/ //AvailablePort.getRandomAvailablePort(AvailablePort.JGROUPS);
final int serverPort = AvailablePortHelper.getRandomAvailableTCPPort();
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
createBridgeServer(server, regionName, serverPort, false, false);
@@ -398,7 +398,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final VM client = host.getVM(1);
final String regionName = getUniqueName();
final int serverPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
createBridgeServer(server, regionName, serverPort, false, true);
@@ -460,7 +460,7 @@ import com.gemstone.gemfire.test.dunit.VM;
int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(2);
final int server1Port = ports[0];
final int server2Port = ports[1];
- final String serverHost = NetworkSupport.getServerHostName(server1.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server1.getHost());
createBridgeServer(server1, regionName, server1Port, true, false);
@@ -525,7 +525,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final VM client = host.getVM(1);
final String regionName = getUniqueName();
final int serverPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
createBridgeServer(server, regionName, serverPort, false, false);
@@ -588,7 +588,7 @@ import com.gemstone.gemfire.test.dunit.VM;
final VM client = host.getVM(1);
final String regionName = getUniqueName();
final int serverPort = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
- final String serverHost = NetworkSupport.getServerHostName(server.getHost());
+ final String serverHost = NetworkUtils.getServerHostName(server.getHost());
final int numLocalValues = 101;
createBridgeServerWithoutLoader(server, regionName, serverPort, false);
@@ -669,7 +669,7 @@ import com.gemstone.gemfire.test.dunit.VM;
public void run2() throws CacheException {
// Create DS
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
if (offheap) {
config.setProperty(DistributionConfig.OFF_HEAP_MEMORY_SIZE_NAME, "350m");
}
@@ -733,7 +733,7 @@ import com.gemstone.gemfire.test.dunit.VM;
public void run2() throws CacheException {
// Create DS
Properties config = new Properties();
- config.setProperty("locators", "localhost["+DistributedTestSupport.getDUnitLocatorPort()+"]");
+ config.setProperty("locators", "localhost["+DistributedTestUtils.getDUnitLocatorPort()+"]");
getSystem(config);
// Create Region
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerInvalidAndDestroyedEntryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerInvalidAndDestroyedEntryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerInvalidAndDestroyedEntryDUnitTest.java
index c14767b..e5ec301 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerInvalidAndDestroyedEntryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerInvalidAndDestroyedEntryDUnitTest.java
@@ -41,7 +41,7 @@ import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.cache.tier.InterestType;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableCallableIF;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -192,10 +192,10 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
myRegion.invalidate(key2);
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("creating client cache");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("creating client cache");
ClientCache c = new ClientCacheFactory()
.addPoolServer("localhost", serverPort)
- .set(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel())
+ .set(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel())
.create();
Region myRegion = c.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).create(regionName);;
if (useTX) {
@@ -206,7 +206,7 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
assertNotNull(myRegion.get(notAffectedKey));
// get of an invalid entry should return null and create the entry in an invalid state
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("getting "+key1+" - should reach this cache and be INVALID");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("getting "+key1+" - should reach this cache and be INVALID");
assertNull(myRegion.get(key1));
assertTrue(myRegion.containsKey(key1));
@@ -263,7 +263,7 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
// test that a listener is not invoked when there is already an invalidated
// entry in the client cache
UpdateListener listener = new UpdateListener();
- listener.log = com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter();
+ listener.log = com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter();
myRegion.getAttributesMutator().addCacheListener(listener);
myRegion.get(key1);
assertEquals("expected no cache listener invocations",
@@ -310,10 +310,10 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
myRegion.destroy(key2);
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("creating client cache");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("creating client cache");
ClientCache c = new ClientCacheFactory()
.addPoolServer("localhost", serverPort)
- .set(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel())
+ .set(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel())
.create();
Region myRegion = c.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).create(regionName);;
if (useTX) {
@@ -322,7 +322,7 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
// get of a valid entry should work
assertNotNull(myRegion.get(notAffectedKey));
// get of an invalid entry should return null and create the entry in an invalid state
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("getting "+key1+" - should reach this cache and be a TOMBSTONE");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("getting "+key1+" - should reach this cache and be a TOMBSTONE");
assertNull(myRegion.get(key1));
assertFalse(myRegion.containsKey(key1));
RegionEntry entry;
@@ -375,7 +375,7 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
keys.add(notAffectedKey); keys.add(key1); keys.add(key2);
Map result = myRegion.getAll(keys);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("result of getAll = " + result);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("result of getAll = " + result);
assertNotNull(result.get(notAffectedKey));
assertNull(result.get(key1));
assertNull(result.get(key2));
@@ -432,10 +432,10 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
}
}
});
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("creating client cache");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("creating client cache");
ClientCache c = new ClientCacheFactory()
.addPoolServer("localhost", serverPort)
- .set(DistributionConfig.LOG_LEVEL_NAME, LogWriterSupport.getDUnitLogLevel())
+ .set(DistributionConfig.LOG_LEVEL_NAME, LogWriterUtils.getDUnitLogLevel())
.setPoolSubscriptionEnabled(true)
.create();
@@ -458,7 +458,7 @@ public class ClientServerInvalidAndDestroyedEntryDUnitTest extends CacheTestCase
BucketRegion bucket = ((PartitionedRegion)myRegion).getBucketRegion(key10);
if (bucket != null) {
event.setRegion(bucket);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("performing local destroy in " + bucket + " ccEnabled="+bucket.concurrencyChecksEnabled + " rvv="+bucket.getVersionVector());
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("performing local destroy in " + bucket + " ccEnabled="+bucket.concurrencyChecksEnabled + " rvv="+bucket.getVersionVector());
bucket.concurrencyChecksEnabled = false; // turn off cc so entry is removed
bucket.mapDestroy(event, false, false, null);
bucket.concurrencyChecksEnabled = true;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionCCEDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionCCEDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionCCEDUnitTest.java
index 6a262e5..6f76fc8 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionCCEDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionCCEDUnitTest.java
@@ -25,7 +25,7 @@ import com.gemstone.gemfire.cache.partition.PartitionRegionHelper;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -99,7 +99,7 @@ public class ClientServerTransactionCCEDUnitTest extends
for (Object key : clientTags.keySet()) {
VersionTag serverTag = serverTags.get(key);
serverTag.setMemberID(serverId);
- LogWriterSupport.getLogWriter().fine("SWAP:key:"+key+" clientVersion:"+clientTags.get(key)+" serverVersion:"+serverTag);
+ LogWriterUtils.getLogWriter().fine("SWAP:key:"+key+" clientVersion:"+clientTags.get(key)+" serverVersion:"+serverTag);
assertEquals(clientTags.get(key), serverTags.get(key));
serverTags.remove(key);
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionDUnitTest.java
index 9405509..2bee512 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ClientServerTransactionDUnitTest.java
@@ -67,7 +67,7 @@ import com.gemstone.gemfire.internal.cache.execute.util.RollbackFunction;
import com.gemstone.gemfire.internal.cache.tx.ClientTXStateStub;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -171,7 +171,7 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
// these settings were used to manually check that tx operation stats were being updated
//ccf.set(DistributionConfig.STATISTIC_SAMPLING_ENABLED_NAME, "true");
//ccf.set(DistributionConfig.STATISTIC_ARCHIVE_FILE_NAME, "clientStats.gfs");
@@ -231,7 +231,7 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port1);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
@@ -302,7 +302,7 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port1);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(cachingProxy ? ClientRegionShortcut.CACHING_PROXY : ClientRegionShortcut.PROXY);
@@ -344,7 +344,7 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
Wait.waitForCriterion(new WaitCriterion() {
public boolean done() {
Set states = txmgr.getTransactionsForClient((InternalDistributedMember)myId);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("found " + states.size() + " tx states for " + myId);
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("found " + states.size() + " tx states for " + myId);
return states.isEmpty();
}
public String description() {
@@ -401,17 +401,17 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
assertEquals(initSize, pr.size());
assertEquals(initSize, r.size());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("Looking up transaction manager");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("Looking up transaction manager");
TXManagerImpl mgr = (TXManagerImpl) getCache().getCacheTransactionManager();
Context ctx = getCache().getJNDIContext();
UserTransaction utx = (UserTransaction)ctx.lookup("java:/UserTransaction");
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("starting transaction");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("starting transaction");
if (useJTA) {
utx.begin();
} else {
mgr.begin();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("done starting transaction");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("done starting transaction");
for (int i = 0; i < MAX_ENTRIES; i++) {
CustId custId = new CustId(i);
Customer cust = new Customer("name"+suffix+i, "address"+suffix+i);
@@ -423,10 +423,10 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
Customer cust = new Customer("name"+suffix+i, "address"+suffix+i);
assertEquals(cust, r.get(custId));
assertEquals(cust, pr.get(custId));
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("SWAP:get:"+r.get(custId));
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("SWAP:get:"+pr.get(custId));
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("SWAP:get:"+r.get(custId));
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("SWAP:get:"+pr.get(custId));
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("suspending transaction");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("suspending transaction");
if (!useJTA) {
TXStateProxy tx = mgr.internalSuspend();
if (prePopulateData) {
@@ -441,7 +441,7 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
assertNull(r.get(new CustId(i)));
assertNull(pr.get(new CustId(i)));
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("resuming transaction");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("resuming transaction");
mgr.resume(tx);
}
assertEquals(
@@ -450,13 +450,13 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
assertEquals(
"pr sized should be " + MAX_ENTRIES + " but it is:" + pr.size(),
MAX_ENTRIES, pr.size());
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("committing transaction");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("committing transaction");
if (useJTA) {
utx.commit();
} else {
getCache().getCacheTransactionManager().commit();
}
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("done committing transaction");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("done committing transaction");
assertEquals(
"r sized should be " + MAX_ENTRIES + " but it is:" + r.size(),
MAX_ENTRIES, r.size());
@@ -686,7 +686,7 @@ public class ClientServerTransactionDUnitTest extends RemoteTransactionDUnitTest
Order order = (orderRegion.getAll(keys)).get(orderId);
assertNotNull(order);
mgr.rollback();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("entry for " + orderId + " = " + orderRegion.getEntry(orderId));
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("entry for " + orderId + " = " + orderRegion.getEntry(orderId));
assertNull(orderRegion.getEntry(orderId));
return null;
}
@@ -1337,7 +1337,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ClientCacheFactory ccf = new ClientCacheFactory();
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port1);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(ClientRegionShortcut.PROXY);
@@ -1433,7 +1433,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.addPoolServer("localhost", port2);
ccf.setPoolLoadConditioningInterval(1);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(ClientRegionShortcut.PROXY);
@@ -1550,7 +1550,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
if (port2 != 0) ccf.addPoolServer("localhost", port2);
if (port3 != 0) ccf.addPoolServer("localhost", port3);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(cachingProxy ? ClientRegionShortcut.CACHING_PROXY : ClientRegionShortcut.PROXY);
@@ -2062,7 +2062,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.addPoolServer("localhost", port2);
ccf.setPoolSubscriptionEnabled(false);
ccf.setPoolLoadConditioningInterval(1);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(ClientRegionShortcut.PROXY);
@@ -2455,7 +2455,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.setPoolMinConnections(5);
ccf.setPoolLoadConditioningInterval(-1);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
Region r1 = cCache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).create("r1");
Region r2 = cCache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).create("r2");
@@ -2688,7 +2688,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port);
ccf.addPoolServer("localhost", port2);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
// these settings were used to manually check that tx operation stats were being updated
//ccf.set(DistributionConfig.STATISTIC_SAMPLING_ENABLED_NAME, "true");
//ccf.set(DistributionConfig.STATISTIC_ARCHIVE_FILE_NAME, "clientStats.gfs");
@@ -2702,7 +2702,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
);
Region cust = getCache().getRegion(CUSTOMER);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().fine("SWAP:doing first get from client");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().fine("SWAP:doing first get from client");
assertNull(cust.get(new CustId(0)));
assertNull(cust.get(new CustId(1)));
ArrayList args = new ArrayList();
@@ -2990,7 +2990,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port1);
if (port2 != 0) ccf.addPoolServer("localhost", port2);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
@@ -3124,7 +3124,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.addPoolServer("localhost", port2);
ccf.setPoolMinConnections(0);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
@@ -3212,7 +3212,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port1);
ccf.setPoolMinConnections(0);
ccf.setPoolSubscriptionEnabled(false);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<CustId, Customer> custrf = cCache
.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
@@ -3302,7 +3302,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.setPoolMinConnections(0);
ccf.setPoolSubscriptionEnabled(true);
ccf.setPoolSubscriptionRedundancy(0);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
Region r = cCache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).addCacheListener(new ClientListener()).create(regionName);
r.registerInterestRegex(".*");
@@ -3318,7 +3318,7 @@ public void testClientCommitAndDataStoreGetsEvent() throws Exception {
ccf.addPoolServer("localhost"/*getServerHostName(Host.getHost(0))*/, port1);
ccf.setPoolMinConnections(0);
ccf.setPoolSubscriptionEnabled(true);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
Region r = cCache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).create(regionName);
getCache().getCacheTransactionManager().begin();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentMapOpsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentMapOpsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentMapOpsDUnitTest.java
index c4f04a7..44b89de 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentMapOpsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentMapOpsDUnitTest.java
@@ -50,7 +50,7 @@ import com.gemstone.gemfire.distributed.internal.membership.MembershipManager;
import com.gemstone.gemfire.distributed.internal.membership.gms.MembershipManagerHelper;
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
@@ -148,7 +148,7 @@ public class ConcurrentMapOpsDUnitTest extends CacheTestCase {
ccf.addPoolServer("localhost", port2);
}
ccf.setPoolSubscriptionEnabled(true);
- ccf.set("log-level", LogWriterSupport.getDUnitLogLevel());
+ ccf.set("log-level", LogWriterUtils.getDUnitLogLevel());
ClientCache cCache = getClientCache(ccf);
ClientRegionFactory<Integer, String> crf = cCache
.createClientRegionFactory(isEmpty ? ClientRegionShortcut.PROXY
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRegionOperationsJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRegionOperationsJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRegionOperationsJUnitTest.java
index 0ed0841..833ef11 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRegionOperationsJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRegionOperationsJUnitTest.java
@@ -39,7 +39,7 @@ import org.junit.experimental.categories.Category;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.Scope;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -444,8 +444,8 @@ public class ConcurrentRegionOperationsJUnitTest extends DiskRegionTestingBase
});
t1.start();
t2.start();
- Threads.join(t1, 30 * 1000, null);
- Threads.join(t2, 30 * 1000, null);
+ ThreadUtils.join(t1, 30 * 1000);
+ ThreadUtils.join(t2, 30 * 1000);
assertTrue(!failure);
}
@@ -540,19 +540,19 @@ public class ConcurrentRegionOperationsJUnitTest extends DiskRegionTestingBase
this.timeToStop.set(true);
}
for (int i = 0; i < numberOfPutsThreads; i++) {
- Threads.join(putThreads[i], 10*1000, null);
+ ThreadUtils.join(putThreads[i], 10*1000);
}
for (int i = 0; i < numberOfGetsThreads; i++) {
- Threads.join(getThreads[i], 10*1000, null);
+ ThreadUtils.join(getThreads[i], 10*1000);
}
for (int i = 0; i < numberOfDestroysThreads; i++) {
- Threads.join(destroyThreads[i], 10*1000, null);
+ ThreadUtils.join(destroyThreads[i], 10*1000);
}
for (int i = 0; i < numberOfClearThreads; i++) {
- Threads.join(clearThreads[i], 10*1000, null);
+ ThreadUtils.join(clearThreads[i], 10*1000);
}
for (int i = 0; i < numberOfForceRollThreads; i++) {
- Threads.join(forceRollThreads[i], 10*1000, null);
+ ThreadUtils.join(forceRollThreads[i], 10*1000);
}
if (this.validate) {
@@ -824,7 +824,7 @@ public class ConcurrentRegionOperationsJUnitTest extends DiskRegionTestingBase
);
region.clear();
- Threads.join(th, 20 * 1000, null);
+ ThreadUtils.join(th, 20 * 1000);
LocalRegion.ISSUE_CALLBACKS_TO_CACHE_OBSERVER = false;
DiskStoreImpl.DEBUG_DELAY_JOINING_WITH_COMPACTOR = 500;
CacheObserverHolder.setInstance(old);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRollingAndRegionOperationsJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRollingAndRegionOperationsJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRollingAndRegionOperationsJUnitTest.java
index 9a58c33..b95fa4e 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRollingAndRegionOperationsJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConcurrentRollingAndRegionOperationsJUnitTest.java
@@ -41,7 +41,7 @@ import com.gemstone.gemfire.internal.cache.DiskRegionProperties;
import com.gemstone.gemfire.internal.cache.DiskRegionTestingBase;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.RegionEntry;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
@@ -866,7 +866,7 @@ public class ConcurrentRollingAndRegionOperationsJUnitTest extends
for (int i = 0; i < threads.size(); ++i) {
Thread th = (Thread)threads.get(i);
if (th != null) {
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
}
}
assertTrue(
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConnectDisconnectDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConnectDisconnectDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConnectDisconnectDUnitTest.java
index 90f0016..7a29e65 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConnectDisconnectDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/ConnectDisconnectDUnitTest.java
@@ -38,13 +38,13 @@ import com.gemstone.gemfire.distributed.internal.membership.gms.mgr.GMSMembershi
import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.AsyncInvocation;
-import com.gemstone.gemfire.test.dunit.DistributedSystemSupport;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
+import com.gemstone.gemfire.test.dunit.DistributedTestUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
/** A test of 46438 - missing response to an update attributes message */
public class ConnectDisconnectDUnitTest extends CacheTestCase {
@@ -76,7 +76,7 @@ public class ConnectDisconnectDUnitTest extends CacheTestCase {
// setLocatorPorts(ports);
for(int i = 0; i < 20; i++) {
- LogWriterSupport.getLogWriter().info("Test run: " + i);
+ LogWriterUtils.getLogWriter().info("Test run: " + i);
runOnce();
tearDown();
setUp();
@@ -90,7 +90,7 @@ public class ConnectDisconnectDUnitTest extends CacheTestCase {
static int[] locatorPorts;
public void setLocatorPorts(int[] ports) {
- DistributedSystemSupport.deleteLocatorStateFile(ports);
+ DistributedTestUtils.deleteLocatorStateFile(ports);
String locators = "";
for (int i=0; i<ports.length; i++) {
if (i > 0) {
@@ -114,7 +114,7 @@ public class ConnectDisconnectDUnitTest extends CacheTestCase {
@Override
protected final void postTearDownCacheTestCase() throws Exception {
if (locatorPorts != null) {
- DistributedSystemSupport.deleteLocatorStateFile(locatorPorts);
+ DistributedTestUtils.deleteLocatorStateFile(locatorPorts);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationDUnitTest.java
index 2e848bf..af35666 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationDUnitTest.java
@@ -659,7 +659,7 @@ public class DeltaPropagationDUnitTest extends DistributedTestCase {
primary = (((PoolImpl)pool).getPrimaryPort() == PORT1) ? VM0
: ((((PoolImpl)pool).getPrimaryPort() == PORT2) ? VM1 : VM2);
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("waiting for client to receive last_key");
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("waiting for client to receive last_key");
waitForLastKey();
long fromDeltasOnClient = DeltaTestImpl.getFromDeltaInvokations()
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationStatsDUnitTest.java
index 1e35b39..bf96b18 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaPropagationStatsDUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.CacheClientProxy;
import com.gemstone.gemfire.internal.tcp.ConnectionTable;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -132,7 +132,7 @@ public class DeltaPropagationStatsDUnitTest extends DistributedTestCase {
int port = (Integer)vm0.invoke(DeltaPropagationStatsDUnitTest.class,
"createServerCache", args);
- createClientCache(NetworkSupport.getServerHostName(vm0.getHost()), port);
+ createClientCache(NetworkUtils.getServerHostName(vm0.getHost()), port);
vm0.invoke(DeltaPropagationStatsDUnitTest.class, "putCleanDelta",
new Object[] {Integer.valueOf(numOfKeys), Long.valueOf(updates)});
@@ -160,7 +160,7 @@ public class DeltaPropagationStatsDUnitTest extends DistributedTestCase {
int port = (Integer)vm0.invoke(DeltaPropagationStatsDUnitTest.class,
"createServerCache", args);
- createClientCache(NetworkSupport.getServerHostName(vm0.getHost()), port);
+ createClientCache(NetworkUtils.getServerHostName(vm0.getHost()), port);
vm0.invoke(DeltaPropagationStatsDUnitTest.class,
"putErrorDeltaForReceiver", new Object[] {Integer.valueOf(numOfKeys),
@@ -281,7 +281,7 @@ public class DeltaPropagationStatsDUnitTest extends DistributedTestCase {
Scope.DISTRIBUTED_ACK, Boolean.TRUE};
Integer port = (Integer)vm0.invoke(DeltaPropagationStatsDUnitTest.class,
"createServerCache", args);
- createClientCache(NetworkSupport.getServerHostName(vm0.getHost()), port);
+ createClientCache(NetworkUtils.getServerHostName(vm0.getHost()), port);
putCleanDelta(numOfKeys, updates);
putLastKey();
@@ -317,7 +317,7 @@ public class DeltaPropagationStatsDUnitTest extends DistributedTestCase {
Scope.DISTRIBUTED_ACK, Boolean.TRUE};
Integer port = (Integer)vm0.invoke(DeltaPropagationStatsDUnitTest.class,
"createServerCache", args);
- createClientCache(NetworkSupport.getServerHostName(vm0.getHost()), port);
+ createClientCache(NetworkUtils.getServerHostName(vm0.getHost()), port);
putErrorDeltaForReceiver(numOfKeys, updates, errors);
putErrorDeltaForSender(numOfKeys, updates, errors2, Boolean.FALSE);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaSizingDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaSizingDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaSizingDUnitTest.java
index 73a0122..4de9daf 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaSizingDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DeltaSizingDUnitTest.java
@@ -31,7 +31,7 @@ import com.gemstone.gemfire.cache.server.CacheServer;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableCallable;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -110,8 +110,8 @@ public class DeltaSizingDUnitTest extends CacheTestCase {
int port2) {
AttributesFactory<Integer, TestDelta> attr = new AttributesFactory<Integer, TestDelta>();
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(host), port1);
- pf.addServer(NetworkSupport.getServerHostName(host), port2);
+ pf.addServer(NetworkUtils.getServerHostName(host), port1);
+ pf.addServer(NetworkUtils.getServerHostName(host), port2);
pf.create("pool");
attr.setCloningEnabled(clone);
attr.setDataPolicy(DataPolicy.EMPTY);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionClearJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionClearJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionClearJUnitTest.java
index 938d741..7e58329 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionClearJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionClearJUnitTest.java
@@ -44,7 +44,7 @@ import com.gemstone.gemfire.cache.RegionEvent;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.cache.versions.VersionStamp;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
// @TODO: use DiskRegionTestingBase and DiskRegionHelperFactory
@@ -184,7 +184,7 @@ public class DiskRegionClearJUnitTest {
fail("timed out counter="+counter);
}
}
- Threads.join(thread, 10 * 60 * 1000, null);
+ ThreadUtils.join(thread, 10 * 60 * 1000);
Assert.assertTrue(counter == 3);
if(!cleared)
fail("clear not done although puts have been done");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionJUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionJUnitTest.java
index 94a120f..94e6ec6 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionJUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DiskRegionJUnitTest.java
@@ -47,7 +47,7 @@ import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
import com.gemstone.gemfire.internal.cache.lru.LRUStatistics;
import com.gemstone.gemfire.internal.cache.lru.NewLRUClockHand;
import com.gemstone.gemfire.internal.cache.persistence.UninterruptibleFileChannel;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
@@ -412,11 +412,11 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
}
long startTime = System.currentTimeMillis();
- Threads.join(thread1, 20 * 1000, null);
- Threads.join(thread2, 20 * 1000, null);
- Threads.join(thread3, 20 * 1000, null);
- Threads.join(thread4, 20 * 1000, null);
- Threads.join(thread5, 20 * 1000, null);
+ ThreadUtils.join(thread1, 20 * 1000);
+ ThreadUtils.join(thread2, 20 * 1000);
+ ThreadUtils.join(thread3, 20 * 1000);
+ ThreadUtils.join(thread4, 20 * 1000);
+ ThreadUtils.join(thread5, 20 * 1000);
long interval = System.currentTimeMillis() - startTime;
if (interval > 100000) {
@@ -522,11 +522,11 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
long startTime = System.currentTimeMillis();
finished = true;
- Threads.join(thread1, 5 * 60 * 1000, null);
- Threads.join(thread2, 5 * 60 * 1000, null);
- Threads.join(thread3, 5 * 60 * 1000, null);
- Threads.join(thread4, 5 * 60 * 1000, null);
- Threads.join(thread5, 5 * 60 * 1000, null);
+ ThreadUtils.join(thread1, 5 * 60 * 1000);
+ ThreadUtils.join(thread2, 5 * 60 * 1000);
+ ThreadUtils.join(thread3, 5 * 60 * 1000);
+ ThreadUtils.join(thread4, 5 * 60 * 1000);
+ ThreadUtils.join(thread5, 5 * 60 * 1000);
long interval = System.currentTimeMillis() - startTime;
if (interval > 100000) {
fail(" Test took too long in going to join, it should have exited before 100000 ms");
@@ -721,7 +721,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
oplogs = dsi.testHookGetAllOverflowOplogs();
int retryCount = 20;
while (oplogs.size() > 1 && retryCount > 0) {
- Wait.staticPause(100);
+ Wait.pause(100);
oplogs = dsi.testHookGetAllOverflowOplogs();
retryCount--;
}
@@ -1153,7 +1153,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
public void beforeGoingToCompact()
{
try {
- Threads.join(t1, 60 * 1000, null);
+ ThreadUtils.join(t1, 60 * 1000);
}
catch (Exception ignore) {
logWriter.error("Exception occured", ignore);
@@ -1193,7 +1193,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
});
testThread.start();
// region.clear();
- Threads.join(testThread, 40 * 1000, null);
+ ThreadUtils.join(testThread, 40 * 1000);
assertFalse(failureCause, testFailed);
assertFalse("Expected situation of max directory size violation happening and available space less than zero did not happen ", exceptionOccured); // CC jade1d failure
@@ -1733,7 +1733,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
region.create("key1", "value1");
try {
cache.getLogger().info("waiting for clear to finish");
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
}
catch (Exception ie) {
DiskRegionJUnitTest.this.exceptionOccured = true;
@@ -1783,7 +1783,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
public void beforeDiskClear()
{
th.start();
- Wait.staticPause(7 * 1000);
+ Wait.pause(7 * 1000);
System.out.println("FIXME: this thread does not terminate--EVER!");
// try {
// DistributedTestCase.join(th, 7 * 1000, null);
@@ -1796,7 +1796,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
});
try {
region.clear();
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
assertFalse(this.failureCause, this.exceptionOccured);
//We expect 1 entry to exist, because the clear was triggered before
//the update
@@ -1843,7 +1843,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
LocalRegion.ISSUE_CALLBACKS_TO_CACHE_OBSERVER = false;
th.start();
System.out.println("FIXME: this thread (2) does not terminate--EVER!");
- Wait.staticPause(10 * 1000);
+ Wait.pause(10 * 1000);
// try {
// DistributedTestCase.join(th, 10 * 1000, null);
// }
@@ -1855,7 +1855,7 @@ public class DiskRegionJUnitTest extends DiskRegionTestingBase
});
try {
region.clear();
- Threads.join(th, 30 * 1000, null);
+ ThreadUtils.join(th, 30 * 1000);
assertFalse(this.failureCause, this.exceptionOccured);
//We expect 1 entry to exist, because the clear was triggered before
//the update
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DistributedCacheTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DistributedCacheTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DistributedCacheTestCase.java
index b2bd412..309caab 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DistributedCacheTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/DistributedCacheTestCase.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -230,7 +230,7 @@ public abstract class DistributedCacheTestCase
Region newRegion =
root.createSubregion(name, factory.create());
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Created Region '" + newRegion.getFullPath() + "'");
}
@@ -302,7 +302,7 @@ public abstract class DistributedCacheTestCase
factory.create());
sub.create(entryName, null);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Defined Entry named '" + entryName + "' in region '" +
sub.getFullPath() +"'");
}
@@ -329,7 +329,7 @@ public abstract class DistributedCacheTestCase
sub.put(entryName, value);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Put value " + value + " in entry " + entryName + " in region '" +
region.getFullPath() +"'");
}
@@ -378,7 +378,7 @@ public abstract class DistributedCacheTestCase
sub.put(entryName, value);
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Replaced value " + value + "in entry " + entryName + " in region '" +
region.getFullPath() +"'");
}
@@ -467,7 +467,7 @@ public abstract class DistributedCacheTestCase
Host host = Host.getHost(0);
int vmCount = host.getVMCount();
for (int i=0; i<vmCount; i++) {
- LogWriterSupport.getLogWriter().info("Invoking " + methodName + "on VM#" + i);
+ LogWriterUtils.getLogWriter().info("Invoking " + methodName + "on VM#" + i);
host.getVM(i).invoke(this.getClass(), methodName, args);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EventTrackerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EventTrackerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EventTrackerDUnitTest.java
index fb56e63..09fc882 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EventTrackerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EventTrackerDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.internal.cache.EventTracker.BulkOpHolder;
import com.gemstone.gemfire.internal.cache.ha.ThreadIdentifier;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -161,7 +161,7 @@ public class EventTrackerDUnitTest extends CacheTestCase {
// Create Create Region in the client
final int port = serverVM.invokeInt(EventTrackerDUnitTest.class, "getCacheServerPort");
- final String hostName = NetworkSupport.getServerHostName(host);
+ final String hostName = NetworkUtils.getServerHostName(host);
clientVM.invoke(new CacheSerializableRunnable("Create client") {
public void run2() throws CacheException {
getCache();
@@ -234,7 +234,7 @@ public class EventTrackerDUnitTest extends CacheTestCase {
// Create Create Region in the client
final int port = serverVM.invokeInt(EventTrackerDUnitTest.class, "getCacheServerPort");
- final String hostName = NetworkSupport.getServerHostName(host);
+ final String hostName = NetworkUtils.getServerHostName(host);
clientVM.invoke(new CacheSerializableRunnable("Create client") {
public void run2() throws CacheException {
getCache();
@@ -312,7 +312,7 @@ public class EventTrackerDUnitTest extends CacheTestCase {
// Create Create Region in the client
final int port = vm0.invokeInt(EventTrackerDUnitTest.class, "getCacheServerPort");
- final String hostName = NetworkSupport.getServerHostName(host);
+ final String hostName = NetworkUtils.getServerHostName(host);
vm2.invoke(new CacheSerializableRunnable("Create client") {
public void run2() throws CacheException {
getCache();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionDUnitTest.java
index 750b18c..2d19ba5 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionDUnitTest.java
@@ -26,7 +26,7 @@ import org.junit.experimental.categories.Category;
import com.gemstone.gemfire.cache.EvictionAlgorithm;
import com.gemstone.gemfire.internal.cache.lru.HeapEvictor;
import com.gemstone.gemfire.internal.cache.lru.MemLRUCapacityController;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.junit.categories.DistributedTest;
@Category(DistributedTest.class)
@@ -95,7 +95,7 @@ public class EvictionDUnitTest extends EvictionTestBase {
createPartitionedRegion(true, EvictionAlgorithm.LRU_ENTRY, "PR1", 4, 1, 1000,maxEnteries);
final PartitionedRegion pr = (PartitionedRegion)cache.getRegion("PR1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PR- " +pr.getEvictionAttributes().getMaximum());
for (int counter = 1; counter <= maxEnteries+extraEntries; counter++) {
@@ -112,7 +112,7 @@ public class EvictionDUnitTest extends EvictionTestBase {
createPartitionedRegion(true, EvictionAlgorithm.LRU_ENTRY, "PR1", 4, 1, 1000,maxEnteries);
final PartitionedRegion pr = (PartitionedRegion)cache.getRegion("PR1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PR- " +pr.getEvictionAttributes().getMaximum());
for (int i = 0; i < 3; i++) {
// assume mod-based hashing for bucket creation
@@ -139,7 +139,7 @@ public class EvictionDUnitTest extends EvictionTestBase {
createPartitionedRegion(true, EvictionAlgorithm.LRU_ENTRY, "PR1", 5, 1, 1000,maxEnteries);
final PartitionedRegion pr = (PartitionedRegion)cache.getRegion("PR1");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"PR- " +pr.getEvictionAttributes().getMaximum());
for (int counter = 1; counter <= maxEnteries+extraEntries; counter++) {
@@ -155,7 +155,7 @@ public class EvictionDUnitTest extends EvictionTestBase {
if (bucketRegion == null) {
continue;
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"FINAL bucket= " + bucketRegion.getFullPath() + "size= "
+ bucketRegion.size() + " count= "+bucketRegion.entryCount());
assertEquals(4,bucketRegion.size());
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionObjectSizerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionObjectSizerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionObjectSizerDUnitTest.java
index 91dca8b..18346b7 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionObjectSizerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionObjectSizerDUnitTest.java
@@ -38,7 +38,7 @@ import com.gemstone.gemfire.internal.cache.lru.HeapLRUCapacityController;
import com.gemstone.gemfire.internal.cache.lru.Sizeable;
import com.gemstone.gemfire.internal.size.ReflectionSingleObjectSizer;
import com.gemstone.gemfire.test.dunit.Assert;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
public class EvictionObjectSizerDUnitTest extends CacheTestCase {
@@ -136,7 +136,7 @@ public class EvictionObjectSizerDUnitTest extends CacheTestCase {
int valueSize = SharedLibrary.getObjectHeaderSize() + 4 /* array length */ + 0 /* bytes */;
valueSize = (int) ReflectionSingleObjectSizer.roundUpSize(valueSize);
int entrySize = keySize + valueSize + ((HeapLRUCapacityController)((PartitionedRegion)region).getEvictionController()).getPerEntryOverhead();
- LogWriterSupport.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedNonSizerObject expected= " + entrySize);
+ LogWriterUtils.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedNonSizerObject expected= " + entrySize);
assertEquals(entrySize, getSizeOfCustomizedData(1));
}
@@ -150,7 +150,7 @@ public class EvictionObjectSizerDUnitTest extends CacheTestCase {
int valueSize = SharedLibrary.getObjectHeaderSize() + 4 /* array length */ + 4 /* bytes */;
valueSize = (int) ReflectionSingleObjectSizer.roundUpSize(valueSize);
int entrySize = keySize + valueSize + ((HeapLRUCapacityController)((PartitionedRegion)region).getEvictionController()).getPerEntryOverhead();
- LogWriterSupport.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedNonSizerObject expected= " + entrySize);
+ LogWriterUtils.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedNonSizerObject expected= " + entrySize);
assertEquals(entrySize, getSizeOfCustomizedData(2));
}
}
@@ -168,7 +168,7 @@ public class EvictionObjectSizerDUnitTest extends CacheTestCase {
// Total Size of entry should be= 71
putCustomizedData(1, new TestObjectSizerImpl());
int expected = (0+160+(Sizeable.PER_OBJECT_OVERHEAD*2)+((HeapLRUCapacityController)((PartitionedRegion)region).getEvictionController()).getPerEntryOverhead());
- LogWriterSupport.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedSizerObject expected= " + expected);
+ LogWriterUtils.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedSizerObject expected= " + expected);
assertEquals(expected, getSizeOfCustomizedData(1));
assertEquals(expected, ((PartitionedRegion)region).getEvictionController()
.getLRUHelper().getStats().getCounter());
@@ -186,7 +186,7 @@ public class EvictionObjectSizerDUnitTest extends CacheTestCase {
// Total Size of entry should be= 72
putCustomizedObjects(new TestNonSizerObject("1"), new TestObjectSizerImpl());
int expected = (1+160+(Sizeable.PER_OBJECT_OVERHEAD*2)+((HeapLRUCapacityController)((PartitionedRegion)region).getEvictionController()).getPerEntryOverhead());
- LogWriterSupport.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedSizerObjects expected= " + expected);
+ LogWriterUtils.getLogWriter().info("testObjectSizerForHeapLRU_CustomizedSizerObjects expected= " + expected);
assertEquals(expected, getSizeOfCustomizedObject(new TestNonSizerObject("1")));
assertEquals(expected, ((PartitionedRegion)region).getEvictionController()
.getLRUHelper().getStats().getCounter());
@@ -207,8 +207,8 @@ public class EvictionObjectSizerDUnitTest extends CacheTestCase {
ds = getSystem(props);
cache = CacheFactory.create(ds);
assertNotNull(cache);
- LogWriterSupport.getLogWriter().info("cache= " + cache);
- LogWriterSupport.getLogWriter().info("cache closed= " + cache.isClosed());
+ LogWriterUtils.getLogWriter().info("cache= " + cache);
+ LogWriterUtils.getLogWriter().info("cache closed= " + cache.isClosed());
cache.getResourceManager().setEvictionHeapPercentage(50);
}
catch (Exception e) {
@@ -261,7 +261,7 @@ public class EvictionObjectSizerDUnitTest extends CacheTestCase {
region = cache.createRegion(regionName, factory.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info("Partitioned Region created Successfully :" + region);
+ LogWriterUtils.getLogWriter().info("Partitioned Region created Successfully :" + region);
}
/**
@@ -295,7 +295,7 @@ public class EvictionObjectSizerDUnitTest extends CacheTestCase {
if (map == null || map.size() == 0) {
continue;
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Checking for entry in bucket region: " + bucketRegion);
for (int counter = 1; counter <= noOfElememts; counter++) {
assertEquals(entrySize, ((AbstractLRURegionEntry)map
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionStatsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionStatsDUnitTest.java
index 3ea1999..a31e1e7 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionStatsDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/EvictionStatsDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.internal.cache.lru.HeapEvictor;
import com.gemstone.gemfire.internal.cache.lru.HeapLRUCapacityController;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
@@ -327,14 +327,14 @@ public class EvictionStatsDUnitTest extends CacheTestCase {
@Override
public void run2() throws CacheException {
PartitionedRegion pr1 = (PartitionedRegion)cache.getRegion("PR1");
- LogWriterSupport.getLogWriter().info("dddd local"+pr1.getLocalMaxMemory());
- LogWriterSupport.getLogWriter().info("dddd local evi"+((AbstractLRURegionMap)pr1.entries)._getLruList().stats()
+ LogWriterUtils.getLogWriter().info("dddd local"+pr1.getLocalMaxMemory());
+ LogWriterUtils.getLogWriter().info("dddd local evi"+((AbstractLRURegionMap)pr1.entries)._getLruList().stats()
.getEvictions());
- LogWriterSupport.getLogWriter().info("dddd local entries"+((AbstractLRURegionMap)pr1.entries)._getLruList().stats()
+ LogWriterUtils.getLogWriter().info("dddd local entries"+((AbstractLRURegionMap)pr1.entries)._getLruList().stats()
.getCounter()/(1024*1024));
HeapMemoryMonitor hmm = ((InternalResourceManager) cache.getResourceManager()).getHeapMonitor();
long memused=hmm.getBytesUsed()/(1024*1024);
- LogWriterSupport.getLogWriter().info("dddd local memused= "+memused);
+ LogWriterUtils.getLogWriter().info("dddd local memused= "+memused);
assertTrue(((AbstractLRURegionMap)pr1.entries)._getLruList().stats()
.getEvictions() >= extraEntries / 2);
assertEquals(((AbstractLRURegionMap)pr1.entries)._getLruList().stats()
@@ -412,8 +412,8 @@ public class EvictionStatsDUnitTest extends CacheTestCase {
ds = getSystem(props);
cache = CacheFactory.create(ds);
assertNotNull(cache);
- LogWriterSupport.getLogWriter().info("cache= " + cache);
- LogWriterSupport.getLogWriter().info("cache closed= " + cache.isClosed());
+ LogWriterUtils.getLogWriter().info("cache= " + cache);
+ LogWriterUtils.getLogWriter().info("cache closed= " + cache.isClosed());
cache.getResourceManager().setEvictionHeapPercentage(20);
}
catch (Exception e) {
@@ -484,7 +484,7 @@ public class EvictionStatsDUnitTest extends CacheTestCase {
region = cache.createRegion(regionName, factory.create());
assertNotNull(region);
- LogWriterSupport.getLogWriter().info("Partitioned Region created Successfully :" + region);
+ LogWriterUtils.getLogWriter().info("Partitioned Region created Successfully :" + region);
}
public static void putData(final String regionName, final int noOfElememts) {
@@ -543,7 +543,7 @@ public class EvictionStatsDUnitTest extends CacheTestCase {
if (bucketRegion == null) {
continue;
}
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Size of bucket " + bucketRegion.getName() + "of Pr " + prRegionName
+ " = " + bucketRegion.getCounter() / (1000000));
bucketSize = bucketSize + bucketRegion.getCounter();
[15/33] incubator-geode git commit: Cleanup
Posted by kl...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableRegistrationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableRegistrationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableRegistrationDUnitTest.java
index de0f44a..132ab16 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableRegistrationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableRegistrationDUnitTest.java
@@ -42,8 +42,8 @@ import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -129,7 +129,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
// seconds
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout), Boolean.TRUE });
@@ -202,7 +202,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
// Step 8: Re-start the Client
this.durableClientVM
.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] { getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
+ new Object[] { getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
regionName,
getClientDistributedSystemProperties(durableClientId),
Boolean.TRUE });
@@ -281,7 +281,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
final int durableClientTimeout = 600;
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout) });
@@ -354,7 +354,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
// Step 8: Re-start the Client
this.durableClientVM
.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] { getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
+ new Object[] { getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 0),
regionName,
getClientDistributedSystemProperties(durableClientId),
Boolean.TRUE });
@@ -472,7 +472,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
final int durableClientTimeout = 600;
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 1),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 1),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout) });
@@ -507,7 +507,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
server2VM.invoke(new CacheSerializableRunnable("Verify Interests.") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Verifying interests registered by DurableClient. ###");
+ LogWriterUtils.getLogWriter().info("### Verifying interests registered by DurableClient. ###");
CacheClientNotifier ccn = CacheClientNotifier.getInstance();
CacheClientProxy p = null;
@@ -572,7 +572,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
final int durableClientTimeout = 600;
this.durableClientVM.invoke(CacheServerTestUtil.class, "createCacheClient",
new Object[] {
- getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 1),
+ getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 1),
regionName,
getClientDistributedSystemProperties(durableClientId,
durableClientTimeout) });
@@ -604,7 +604,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
//Re-start the Client
this.durableClientVM
.invoke(CacheServerTestUtil.class, "createCacheClient",
- new Object[] { getClientPool(NetworkSupport.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 1),
+ new Object[] { getClientPool(NetworkUtils.getServerHostName(durableClientVM.getHost()), PORT1, PORT2, true, 1),
regionName,
getClientDistributedSystemProperties(durableClientId),
Boolean.TRUE });
@@ -628,7 +628,7 @@ public class DurableRegistrationDUnitTest extends DistributedTestCase {
server2VM.invoke(new CacheSerializableRunnable("Verify Interests.") {
public void run2() throws CacheException
{
- LogWriterSupport.getLogWriter().info("### Verifying interests registered by DurableClient. ###");
+ LogWriterUtils.getLogWriter().info("### Verifying interests registered by DurableClient. ###");
CacheClientNotifier ccn = CacheClientNotifier.getInstance();
CacheClientProxy p = null;
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableResponseMatrixDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableResponseMatrixDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableResponseMatrixDUnitTest.java
index 8edd412..4ee36bd 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableResponseMatrixDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/DurableResponseMatrixDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -71,7 +71,7 @@ public class DurableResponseMatrixDUnitTest extends DistributedTestCase
// start servers first
PORT1 = ((Integer)server1.invoke(DurableResponseMatrixDUnitTest.class,
"createServerCache"));
- createCacheClient(NetworkSupport.getServerHostName(server1.getHost()));
+ createCacheClient(NetworkUtils.getServerHostName(server1.getHost()));
//Disconnecting the client can cause this
IgnoredException.addIgnoredException("Connection reset||Unexpected IOException");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/EventIDVerificationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/EventIDVerificationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/EventIDVerificationDUnitTest.java
index 3c4efd8..61c6c78 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/EventIDVerificationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/EventIDVerificationDUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.internal.cache.RegionEventImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
/**
@@ -99,7 +99,7 @@ public class EventIDVerificationDUnitTest extends DistributedTestCase
//vm2.invoke(EventIDVerificationDUnitTest.class, "createClientCache", new
// Object[] { new Integer(PORT1),new Integer(PORT2)});
- createClientCache(NetworkSupport.getServerHostName(host), new Integer(PORT1), new Integer(PORT2));
+ createClientCache(NetworkUtils.getServerHostName(host), new Integer(PORT1), new Integer(PORT2));
CacheObserverHolder.setInstance(new CacheObserverAdapter());
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ForceInvalidateEvictionDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ForceInvalidateEvictionDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ForceInvalidateEvictionDUnitTest.java
index e13644f..9313ddd 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ForceInvalidateEvictionDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/ForceInvalidateEvictionDUnitTest.java
@@ -45,7 +45,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.Token;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -345,7 +345,7 @@ public class ForceInvalidateEvictionDUnitTest extends CacheTestCase {
Cache cache = getCache();
PoolFactory pf = PoolManager.createFactory();
- pf.addServer(NetworkSupport.getServerHostName(host), port);
+ pf.addServer(NetworkUtils.getServerHostName(host), port);
pf.setSubscriptionEnabled(true);
pf.create(name);
RegionFactory rf = new RegionFactory();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HABug36738DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HABug36738DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HABug36738DUnitTest.java
index 6b63b77..65d7a7c 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HABug36738DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HABug36738DUnitTest.java
@@ -39,7 +39,7 @@ import com.gemstone.gemfire.internal.cache.tier.sockets.ClientProxyMembershipID;
import com.gemstone.gemfire.internal.cache.tier.sockets.ClientUpdateMessage;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -115,7 +115,7 @@ public class HABug36738DUnitTest extends DistributedTestCase
while (itr.hasNext()) {
Object key = itr.next();
ClientUpdateMessage value = (ClientUpdateMessage)region.get(key);
- LogWriterSupport.getLogWriter().info("key : " + key + "Value " + value.getValue());
+ LogWriterUtils.getLogWriter().info("key : " + key + "Value " + value.getValue());
}
@@ -163,7 +163,7 @@ public class HABug36738DUnitTest extends DistributedTestCase
new EventID(("memberID" + i).getBytes(), i, i));
haRegion.put(new Long(i), clientMessage);
- LogWriterSupport.getLogWriter().info("Putting in the message Queue");
+ LogWriterUtils.getLogWriter().info("Putting in the message Queue");
}
}
@@ -192,7 +192,7 @@ public class HABug36738DUnitTest extends DistributedTestCase
HARegion region = (HARegion)cache.getRegion(Region.SEPARATOR
+ HAHelper.getRegionQueueName(HAREGION_NAME));
assertNotNull(region);
- LogWriterSupport.getLogWriter().info("Size of the Queue : " + region.size());
+ LogWriterUtils.getLogWriter().info("Size of the Queue : " + region.size());
}
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart1DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart1DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart1DUnitTest.java
index 3616f9b..fc9211d 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart1DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart1DUnitTest.java
@@ -17,7 +17,7 @@
package com.gemstone.gemfire.internal.cache.tier.sockets;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
@SuppressWarnings("serial")
@@ -31,7 +31,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
* Tests whether interest is registered or not on both primary and secondaries
*/
public void testInterestRegistrationOnBothPrimaryAndSecondary() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -48,7 +48,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
* verify their responses
*/
public void testInterestRegistrationResponseOnBothPrimaryAndSecondary() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -62,7 +62,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
* interest map
*/
public void testRERegistrationWillNotCreateDuplicateKeysOnServerInterstMaps() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -81,7 +81,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
* registerInterest
*/
public void testPrimaryFailureInRegisterInterest() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -102,7 +102,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
* list
*/
public void testSecondaryFailureInRegisterInterest() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -124,7 +124,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
* registration on newly selected primary
*/
public void testBothPrimaryAndSecondaryFailureInRegisterInterest() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -148,7 +148,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
*
*/
public void testProbablePrimaryFailureInRegisterInterest() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -171,7 +171,7 @@ public class HAInterestPart1DUnitTest extends HAInterestTestCase {
IgnoredException.addIgnoredException("SocketException");
IgnoredException.addIgnoredException("Unexpected IOException");
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
registerK1AndK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart2DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart2DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart2DUnitTest.java
index b955d38..647069f 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart2DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestPart2DUnitTest.java
@@ -20,7 +20,7 @@ import com.gemstone.gemfire.cache.EntryDestroyedException;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.client.ServerConnectivityException;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -37,7 +37,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
* failover should pick new primary
*/
public void testPrimaryFailureInUNregisterInterest() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -63,7 +63,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
* Ep list
*/
public void testSecondaryFailureInUNRegisterInterest() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -85,7 +85,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
* detected live server as well
*/
public void testDSMDetectsServerLiveJustBeforeInterestRegistration() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -111,7 +111,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
* detected live server as well
*/
public void testDSMDetectsServerLiveJustAfterInterestRegistration() throws Exception {
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
createEntriesK1andK2();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
@@ -147,7 +147,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
PORT1 = ((Integer) server1.invoke(HAInterestTestCase.class, "createServerCache")).intValue();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
- createClientPoolCacheConnectionToSingleServer(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCacheConnectionToSingleServer(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
registerK1AndK2();
verifyRefreshedEntriesFromServer();
@@ -225,7 +225,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
server3.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
- createClientPoolCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
VM backup1 = getBackupVM();
VM backup2 = getBackupVM(backup1);
@@ -250,7 +250,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
public void testBug35945() throws Exception {
PORT1 = ((Integer) server1.invoke(HAInterestTestCase.class, "createServerCache")).intValue();
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
- createClientPoolCacheConnectionToSingleServer(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCacheConnectionToSingleServer(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
registerK1AndK2();
verifyRefreshedEntriesFromServer();
@@ -320,7 +320,7 @@ public class HAInterestPart2DUnitTest extends HAInterestTestCase {
server1.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
PORT2 = ((Integer) server2.invoke(HAInterestTestCase.class, "createServerCache")).intValue();
server2.invoke(HAInterestTestCase.class, "createEntriesK1andK2");
- createClientPoolCacheWithSmallRetryInterval(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientPoolCacheWithSmallRetryInterval(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
registerK1AndK2();
verifyRefreshedEntriesFromServer();
VM backup = getBackupVM();
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestTestCase.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestTestCase.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestTestCase.java
index 89be314..852a5bd 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestTestCase.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAInterestTestCase.java
@@ -42,8 +42,7 @@ import com.gemstone.gemfire.internal.cache.tier.InterestType;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.Threads;
+import com.gemstone.gemfire.test.dunit.ThreadUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -314,7 +313,7 @@ public class HAInterestTestCase extends DistributedTestCase {
};
t.start();
try {
- Threads.join(t, 30 * 1000, LogWriterSupport.getLogWriter());
+ ThreadUtils.join(t, 30 * 1000);
} catch (Exception ignore) {
exceptionOccured = true;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAStartupAndFailoverDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAStartupAndFailoverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAStartupAndFailoverDUnitTest.java
index b82a085..5cc561c 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAStartupAndFailoverDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/HAStartupAndFailoverDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -103,7 +103,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
public void testPrimaryFailover() throws Exception
{
- createClientCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
// primary
server1.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsAlive");
@@ -151,7 +151,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
public void testExceptionWhileMakingPrimary()throws Exception
{
- createClientCacheWithIncorrectPrimary(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCacheWithIncorrectPrimary(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
// failed primary due to incorect host name of the server
// new primary
@@ -180,7 +180,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
public void testTwoPrimaryFailedOneAfterTheAnother() throws Exception
{
- createClientCacheWithLargeRetryInterval(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCacheWithLargeRetryInterval(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
// primary
server1.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsAlive");
@@ -207,7 +207,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
*/
public void testPrimaryShouldBeNullAndEPListShouldBeEmptyWhenAllServersAreDead() throws Exception
{
- createClientCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
verifyPrimaryShouldNotBeNullAndEPListShouldNotBeEmpty();
server1.invoke(HAStartupAndFailoverDUnitTest.class, "stopServer");
server2.invoke(HAStartupAndFailoverDUnitTest.class, "stopServer");
@@ -221,7 +221,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
*/
public void testCacheClientUpdatersInitiatesFailoverOnPrimaryFailure() throws Exception
{
- createClientCacheWithLargeRetryInterval(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCacheWithLargeRetryInterval(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
server1.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsAlive");
server2.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
server3.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
@@ -239,7 +239,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
*/
public void testCacheClientUpdaterInitiatesFailoverOnSecondaryFailure() throws Exception
{
- createClientCacheWithLargeRetryInterval(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCacheWithLargeRetryInterval(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
server1.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsAlive");
server2.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
server3.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
@@ -258,7 +258,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
public void testCacheClientUpdaterInitiatesFailoverOnBothPrimaryAndSecondaryFailure() throws Exception
{
- createClientCacheWithLargeRetryInterval(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCacheWithLargeRetryInterval(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
server1.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsAlive");
server2.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
server3.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
@@ -276,7 +276,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
public void testCacheClientUpdaterInitiatesFailoverOnBothPrimaryAndSecondaryFailureWithServerMonitors() throws Exception
{
- createClientCache(this.getName(), NetworkSupport.getServerHostName(server1.getHost()));
+ createClientCache(this.getName(), NetworkUtils.getServerHostName(server1.getHost()));
server1.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsAlive");
server2.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
server3.invoke(HAStartupAndFailoverDUnitTest.class, "verifyDispatcherIsNotAlive");
@@ -295,7 +295,7 @@ public class HAStartupAndFailoverDUnitTest extends DistributedTestCase
// create a client with large retry interval for server monitors and no client updater thread
// so that only cache operation can detect a server failure and should initiate failover
createClientCacheWithLargeRetryIntervalAndWithoutCallbackConnection(this.getName()
- , NetworkSupport.getServerHostName(server1.getHost()));
+ , NetworkUtils.getServerHostName(server1.getHost()));
server2.invoke(HAStartupAndFailoverDUnitTest.class, "stopServer");
put();
verifyDeadAndLiveServers(1,2);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InstantiatorPropagationDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InstantiatorPropagationDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InstantiatorPropagationDUnitTest.java
index 514a724..11482e5 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InstantiatorPropagationDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InstantiatorPropagationDUnitTest.java
@@ -16,7 +16,7 @@
*/
package com.gemstone.gemfire.internal.cache.tier.sockets;
-import static com.gemstone.gemfire.test.dunit.DistributedTestSupport.*;
+import static com.gemstone.gemfire.test.dunit.DistributedTestUtils.*;
import java.io.DataInput;
import java.io.DataOutput;
@@ -51,7 +51,7 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -482,7 +482,7 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
client1
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1) });
// // wait for client2 to come online
@@ -547,11 +547,11 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
client1
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1) });
client2
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT2) });
unregisterInstantiatorsInAllVMs();
@@ -591,11 +591,11 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
client1
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1) });
client2
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT2) });
unregisterInstantiatorsInAllVMs();
@@ -646,11 +646,11 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
client1
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1) });
client2
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT2) });
unregisterInstantiatorsInAllVMs();
@@ -690,11 +690,11 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
PORT2 = initServerCache(server2);
client1
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1) });
client2
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT2) });
unregisterInstantiatorsInAllVMs();
@@ -758,13 +758,13 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
client1
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1) });
client2
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(server1.getHost()),
+ new Object[] { NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1) });
- createClientCache(NetworkSupport.getServerHostName(server2.getHost()), new Integer(PORT2));
+ createClientCache(NetworkUtils.getServerHostName(server2.getHost()), new Integer(PORT2));
unregisterInstantiatorsInAllVMs();
// wait for client2 to come online
@@ -817,13 +817,13 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
PORT1 = initServerCache(server1, 1);
PORT2 = initServerCache(server2, 2);
- createClientCache_EventId(NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1));
+ createClientCache_EventId(NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1));
unregisterInstantiatorsInAllVMs();
client2.invoke(InstantiatorPropagationDUnitTest.class,
"createClientCache_EventId", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT2) });
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT2) });
setClientServerObserver1();
client2.invoke(InstantiatorPropagationDUnitTest.class,
"setClientServerObserver2");
@@ -850,12 +850,12 @@ public class InstantiatorPropagationDUnitTest extends DistributedTestCase {
Wait.pause(3000);
- createClientCache(NetworkSupport.getServerHostName(server1.getHost()),
+ createClientCache(NetworkUtils.getServerHostName(server1.getHost()),
new Integer(PORT1));
client2
.invoke(InstantiatorPropagationDUnitTest.class, "createClientCache",
- new Object[] {NetworkSupport.getServerHostName(server2.getHost()),
+ new Object[] {NetworkUtils.getServerHostName(server2.getHost()),
new Integer(PORT2)});
Wait.pause(3000);
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListDUnitTest.java
index 189ebd6..252916e 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListDUnitTest.java
@@ -51,8 +51,8 @@ import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -167,9 +167,9 @@ public class InterestListDUnitTest extends DistributedTestCase
{
vm1.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1)});
+ NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1)});
vm2.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1)});
+ NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1)});
vm1.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
vm2.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
@@ -232,9 +232,9 @@ public class InterestListDUnitTest extends DistributedTestCase
// Initialization
vm1.invoke(InterestListDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1)});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1)});
vm2.invoke(InterestListDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT1)});
+ new Object[] { NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT1)});
vm1.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
vm2.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
@@ -290,9 +290,9 @@ public class InterestListDUnitTest extends DistributedTestCase
{
vm1.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1)});
+ NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1)});
vm2.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1)});
+ NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1)});
vm1.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
vm2.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
@@ -320,9 +320,9 @@ public class InterestListDUnitTest extends DistributedTestCase
Wait.pause(1000);
// create clients to connect to that server
vm1.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1)});
+ NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1)});
vm2.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1)});
+ NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1)});
// register interest
vm1.invoke(InterestListDUnitTest.class, "registerKeys");
@@ -358,10 +358,10 @@ public class InterestListDUnitTest extends DistributedTestCase
DistributedMember c1 = (DistributedMember)vm1
.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), PORT1});
+ NetworkUtils.getServerHostName(vm0.getHost()), PORT1});
DistributedMember c2 = (DistributedMember)vm2
.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), PORT1});
+ NetworkUtils.getServerHostName(vm0.getHost()), PORT1});
vm1.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
vm2.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
@@ -428,17 +428,17 @@ public class InterestListDUnitTest extends DistributedTestCase
// servers are set up, now do the clients
DistributedMember c1 = (DistributedMember)vm1
.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), PORT1, port2});
+ NetworkUtils.getServerHostName(vm0.getHost()), PORT1, port2});
DistributedMember c2 = (DistributedMember)vm2
.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), PORT1, port2});
+ NetworkUtils.getServerHostName(vm0.getHost()), PORT1, port2});
vm1.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
vm2.invoke(InterestListDUnitTest.class, "createEntriesK1andK2");
// interest registration from clients should cause listeners to be invoked
// in both servers
- LogWriterSupport.getLogWriter().info("test phase 1");
+ LogWriterUtils.getLogWriter().info("test phase 1");
vm1.invoke(InterestListDUnitTest.class, "registerKey",
new Object[] { key1 });
vm2.invoke(InterestListDUnitTest.class, "registerKey",
@@ -452,7 +452,7 @@ public class InterestListDUnitTest extends DistributedTestCase
new Object[]{ two, zero });
// unregistration from clients should invoke listeners on both servers
- LogWriterSupport.getLogWriter().info("test phase 2");
+ LogWriterUtils.getLogWriter().info("test phase 2");
vm1.invoke(InterestListDUnitTest.class, "unregisterKey",
new Object[] { key1 });
vm2.invoke(InterestListDUnitTest.class, "unregisterKey",
@@ -463,7 +463,7 @@ public class InterestListDUnitTest extends DistributedTestCase
new Object[]{ zero, two });
// now the primary server for eache client will register and unregister
- LogWriterSupport.getLogWriter().info("test phase 3");
+ LogWriterUtils.getLogWriter().info("test phase 3");
registerKeyForClient(c1, key1);
vm0.invoke(InterestListDUnitTest.class, "registerKeyForClient",
new Object[] { c1, key1 });
@@ -475,7 +475,7 @@ public class InterestListDUnitTest extends DistributedTestCase
vm0.invoke(InterestListDUnitTest.class, "verifyCountsAndClear",
new Object[]{ two, zero });
- LogWriterSupport.getLogWriter().info("test phase 4");
+ LogWriterUtils.getLogWriter().info("test phase 4");
unregisterKeyForClient(c1, key1);
vm0.invoke(InterestListDUnitTest.class, "unregisterKeyForClient",
new Object[] { c1, key1 });
@@ -496,7 +496,7 @@ public class InterestListDUnitTest extends DistributedTestCase
// Register interest in key1.
vm1.invoke(InterestListDUnitTest.class, "createClientCache",
- new Object[] { NetworkSupport.getServerHostName(vm0.getHost()), new Integer(PORT1) });
+ new Object[] { NetworkUtils.getServerHostName(vm0.getHost()), new Integer(PORT1) });
vm1.invoke(InterestListDUnitTest.class, "registerKey",
new Object[] { key1 });
@@ -535,7 +535,7 @@ public class InterestListDUnitTest extends DistributedTestCase
// Create client cache
vm1.invoke(InterestListDUnitTest.class, "createClientCache",new Object[] {
- NetworkSupport.getServerHostName(vm0.getHost()), port1, port2});
+ NetworkUtils.getServerHostName(vm0.getHost()), port1, port2});
// Register interest in all keys
vm1.invoke(InterestListDUnitTest.class, "registerALL_KEYS");
@@ -795,7 +795,7 @@ public class InterestListDUnitTest extends DistributedTestCase
}
catch (NoSubscriptionServersAvailableException ex) {
// expected an exception
- LogWriterSupport.getLogWriter().info("Got expected exception in registerKey: ");
+ LogWriterUtils.getLogWriter().info("Got expected exception in registerKey: ");
}
}
@@ -1104,7 +1104,7 @@ public class InterestListDUnitTest extends DistributedTestCase
* @see com.gemstone.gemfire.cache.InterestRegistrationListener#afterRegisterInterest(com.gemstone.gemfire.cache.InterestRegistrationEvent)
*/
public void afterRegisterInterest(InterestRegistrationEvent event) {
- LogWriterSupport.getLogWriter().info("InterestListener.afterRegisterInterest invoked with this event: " + event);
+ LogWriterUtils.getLogWriter().info("InterestListener.afterRegisterInterest invoked with this event: " + event);
registrationCount++;
}
@@ -1112,7 +1112,7 @@ public class InterestListDUnitTest extends DistributedTestCase
* @see com.gemstone.gemfire.cache.InterestRegistrationListener#afterUnregisterInterest(com.gemstone.gemfire.cache.InterestRegistrationEvent)
*/
public void afterUnregisterInterest(InterestRegistrationEvent event) {
- LogWriterSupport.getLogWriter().info("InterestListener.afterUnregisterInterest invoked with this event: " + event);
+ LogWriterUtils.getLogWriter().info("InterestListener.afterUnregisterInterest invoked with this event: " + event);
unregistrationCount++;
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListEndpointDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListEndpointDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListEndpointDUnitTest.java
index 5ad9aaa..c0494d3 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListEndpointDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListEndpointDUnitTest.java
@@ -47,7 +47,7 @@ import com.gemstone.gemfire.internal.cache.CacheServerImpl;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.Invoke;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.SerializableRunnable;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -111,7 +111,7 @@ public class InterestListEndpointDUnitTest extends DistributedTestCase
// then create client
Wait.pause(5000); // [bruce] avoid ConnectException
client1.invoke(impl.getClass(), "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
+ NetworkUtils.getServerHostName(server1.getHost()), new Integer(PORT1),new Integer(PORT2)});
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListFailoverDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListFailoverDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListFailoverDUnitTest.java
index be28fd8..a3ec211 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListFailoverDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListFailoverDUnitTest.java
@@ -28,7 +28,7 @@ import com.gemstone.gemfire.internal.cache.PoolFactoryImpl;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -99,9 +99,9 @@ public class InterestListFailoverDUnitTest extends DistributedTestCase
vm1.invoke(CacheServerTestUtil.class, "disableShufflingOfEndpoints");
vm2.invoke(CacheServerTestUtil.class, "disableShufflingOfEndpoints");
vm1.invoke(CacheServerTestUtil.class, "createCacheClient", new Object[] {
- getClientPool(NetworkSupport.getServerHostName(host),redundancyLevel), REGION_NAME });
+ getClientPool(NetworkUtils.getServerHostName(host),redundancyLevel), REGION_NAME });
vm2.invoke(CacheServerTestUtil.class, "createCacheClient", new Object[] {
- getClientPool(NetworkSupport.getServerHostName(host),0), REGION_NAME });
+ getClientPool(NetworkUtils.getServerHostName(host),0), REGION_NAME });
}
/**
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListRecoveryDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListRecoveryDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListRecoveryDUnitTest.java
index cdc0c2b..5785baf 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListRecoveryDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestListRecoveryDUnitTest.java
@@ -42,7 +42,7 @@ import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -100,10 +100,10 @@ public class InterestListRecoveryDUnitTest extends DistributedTestCase
PORT1 = ((Integer)server1.invoke(InterestListRecoveryDUnitTest.class, "createServerCache" )).intValue();
PORT2 = ((Integer)server2.invoke(InterestListRecoveryDUnitTest.class, "createServerCache" )).intValue();
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("server1 port is " + String.valueOf(PORT1));
- com.gemstone.gemfire.test.dunit.LogWriterSupport.getLogWriter().info("server2 port is " + String.valueOf(PORT2));
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("server1 port is " + String.valueOf(PORT1));
+ com.gemstone.gemfire.test.dunit.LogWriterUtils.getLogWriter().info("server2 port is " + String.valueOf(PORT2));
- createClientCache(NetworkSupport.getServerHostName(host), new Integer(PORT1), new Integer(PORT2));
+ createClientCache(NetworkUtils.getServerHostName(host), new Integer(PORT1), new Integer(PORT2));
}
// this test fails because of bug# 35352 , hence commented the bug is Deferred to: Danube
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestRegrListenerDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestRegrListenerDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestRegrListenerDUnitTest.java
index 001ed8f..f122fe6 100644
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestRegrListenerDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestRegrListenerDUnitTest.java
@@ -40,7 +40,7 @@ import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.LogWriterSupport;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -139,7 +139,7 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
}
private void doExpressInterestOnServer(boolean isDurable) {
- LogWriterSupport.getLogWriter().info("Total ClientSessions " + cacheServer.getAllClientSessions().size());
+ LogWriterUtils.getLogWriter().info("Total ClientSessions " + cacheServer.getAllClientSessions().size());
for(ClientSession c : this.cacheServer.getAllClientSessions()) {
c.registerInterestRegex("/serverRegion", ".*", isDurable);
}
@@ -158,7 +158,7 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
intCount = count.intValue();
intCount++;
InterestRegrListenerDUnitTest.this.listnerMap.put(UNREGISTER_INTEREST, intCount);
- LogWriterSupport.getLogWriter().info("InterestRegistrationListener afterUnregisterInterest for "
+ LogWriterUtils.getLogWriter().info("InterestRegistrationListener afterUnregisterInterest for "
+ event.getRegionName() + " keys " + event.getKeysOfInterest() + "Count " + intCount + " Client : " + event.getClientSession().toString());
}
@@ -170,11 +170,11 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
intCount = count.intValue();
intCount++;
InterestRegrListenerDUnitTest.this.listnerMap.put(REGISTER_INTEREST, intCount);
- LogWriterSupport.getLogWriter().info("InterestRegistrationListener afterRegisterInterest for "
+ LogWriterUtils.getLogWriter().info("InterestRegistrationListener afterRegisterInterest for "
+ event.getRegionName() + " keys " + event.getKeysOfInterest() + "Count " + intCount + " Client : " + event.getClientSession().toString());
}
};
- LogWriterSupport.getLogWriter().info("Registered InterestRegistationLister");
+ LogWriterUtils.getLogWriter().info("Registered InterestRegistationLister");
this.cacheServer.registerInterestRegistrationListener(listener);
}
@@ -192,7 +192,7 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
ClientRegionFactory<String,String> regionFactory = clientCache.createClientRegionFactory(ClientRegionShortcut.PROXY);
Region<String, String> region = regionFactory.create("serverRegion");
- LogWriterSupport.getLogWriter().info(
+ LogWriterUtils.getLogWriter().info(
"Client Cache is created in this vm connected to cacheServer " + host
+ ":" + port + " durable? " + isDurable + " with VMID=" + vmID + " region " + region.getFullPath() + " regionSize " + region.size());
assertNotNull(clientCache);
@@ -269,15 +269,15 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
params[1] = port;
params[2] = true;
params[3] = "VM_1";
- LogWriterSupport.getLogWriter().info("Starting client1 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client1 with server endpoint <" + hostName + ">:" + port);
clientVM_1.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
params[3] = "VM_2";
- LogWriterSupport.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
clientVM_2.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
params[3] = "VM_3";
- LogWriterSupport.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
clientVM_3.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
@@ -299,15 +299,15 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
clientVM_3.invoke(InterestRegrListenerDUnitTest.class, "closeClientCacheTask", new Object[]{true});
Thread.sleep(2);
Map<String,Integer> listnerMap = (Map<String, Integer>) serverVM.invoke(InterestRegrListenerDUnitTest.class, "getListenerMapTask");
- LogWriterSupport.getLogWriter().info("Listener Map " + listnerMap);
+ LogWriterUtils.getLogWriter().info("Listener Map " + listnerMap);
int registerCount = getMapValueForKey(listnerMap,REGISTER_INTEREST);
int unregisterCount = getMapValueForKey(listnerMap,UNREGISTER_INTEREST);
assertEquals(3, registerCount);
assertEquals(0, unregisterCount);
- LogWriterSupport.getLogWriter().info("Sleeping till durable client queue are expired and unregister event is called on to listener");
+ LogWriterUtils.getLogWriter().info("Sleeping till durable client queue are expired and unregister event is called on to listener");
Thread.sleep((DURABLE_CLIENT_TIMEOUT+5)*1000);
listnerMap = (Map<String, Integer>) serverVM.invoke(InterestRegrListenerDUnitTest.class, "getListenerMapTask");
- LogWriterSupport.getLogWriter().info("Listener Map after sleeping " + listnerMap);
+ LogWriterUtils.getLogWriter().info("Listener Map after sleeping " + listnerMap);
registerCount = getMapValueForKey(listnerMap,REGISTER_INTEREST);
unregisterCount = getMapValueForKey(listnerMap,UNREGISTER_INTEREST);
assertEquals(3, registerCount);
@@ -334,15 +334,15 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
params[1] = port;
params[2] = true;
params[3] = "VM_1";
- LogWriterSupport.getLogWriter().info("Starting client1 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client1 with server endpoint <" + hostName + ">:" + port);
clientVM_1.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
params[3] = "VM_2";
- LogWriterSupport.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
clientVM_2.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
params[3] = "VM_3";
- LogWriterSupport.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
clientVM_3.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
@@ -361,15 +361,15 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
clientVM_3.invoke(InterestRegrListenerDUnitTest.class, "closeClientCacheTask", new Object[]{true});
Thread.sleep(2);
Map<String,Integer> listnerMap = (Map<String, Integer>) serverVM.invoke(InterestRegrListenerDUnitTest.class, "getListenerMapTask");
- LogWriterSupport.getLogWriter().info("Listener Map " + listnerMap);
+ LogWriterUtils.getLogWriter().info("Listener Map " + listnerMap);
int registerCount = getMapValueForKey(listnerMap,REGISTER_INTEREST);
int unregisterCount = getMapValueForKey(listnerMap,UNREGISTER_INTEREST);
assertEquals(3, registerCount);
assertEquals(0, unregisterCount);
- LogWriterSupport.getLogWriter().info("Sleeping till durable client queue are expired and unregister event is called on to listener");
+ LogWriterUtils.getLogWriter().info("Sleeping till durable client queue are expired and unregister event is called on to listener");
Thread.sleep((DURABLE_CLIENT_TIMEOUT+5)*1000);
listnerMap = (Map<String, Integer>) serverVM.invoke(InterestRegrListenerDUnitTest.class, "getListenerMapTask");
- LogWriterSupport.getLogWriter().info("Listener Map after sleeping " + listnerMap);
+ LogWriterUtils.getLogWriter().info("Listener Map after sleeping " + listnerMap);
registerCount = getMapValueForKey(listnerMap,REGISTER_INTEREST);
unregisterCount = getMapValueForKey(listnerMap,UNREGISTER_INTEREST);
assertEquals(3, registerCount);
@@ -398,15 +398,15 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
params[1] = port;
params[2] = true;
params[3] = "VM_1";
- LogWriterSupport.getLogWriter().info("Starting client1 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client1 with server endpoint <" + hostName + ">:" + port);
clientVM_1.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
params[3] = "VM_2";
- LogWriterSupport.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
clientVM_2.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
params[3] = "VM_3";
- LogWriterSupport.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
+ LogWriterUtils.getLogWriter().info("Starting client2 with server endpoint <" + hostName + ">:" + port);
clientVM_3.invoke(InterestRegrListenerDUnitTest.class, "setUpClientVMTask", params);
@@ -434,13 +434,13 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
@Override
public boolean done() {
Map<String,Integer> listnerMap = (Map<String, Integer>) serverVM.invoke(InterestRegrListenerDUnitTest.class, "getListenerMapTask");
- LogWriterSupport.getLogWriter().info("Listener Map " + listnerMap);
+ LogWriterUtils.getLogWriter().info("Listener Map " + listnerMap);
registerCount = getMapValueForKey(listnerMap,REGISTER_INTEREST);
unregisterCount = getMapValueForKey(listnerMap,UNREGISTER_INTEREST);
if (registerCount == 3 && unregisterCount == 3) {
return true;
}
- LogWriterSupport.getLogWriter().info("Waiting for counts to each reach 3. Current registerCount="+registerCount+"; unregisterCount="+unregisterCount);
+ LogWriterUtils.getLogWriter().info("Waiting for counts to each reach 3. Current registerCount="+registerCount+"; unregisterCount="+unregisterCount);
return false;
}
@@ -452,7 +452,7 @@ public class InterestRegrListenerDUnitTest extends DistributedTestCase {
Wait.waitForCriterion(wc, 20000, 500, true);
- LogWriterSupport.getLogWriter().info("Sleeping till durable client queue are expired and unregister event is called on to listener");
+ LogWriterUtils.getLogWriter().info("Sleeping till durable client queue are expired and unregister event is called on to listener");
Thread.sleep((DURABLE_CLIENT_TIMEOUT+5)*1000);
serverVM.invoke(InterestRegrListenerDUnitTest.class, "closeCacheTask");
}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestResultPolicyDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestResultPolicyDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestResultPolicyDUnitTest.java
index bd15080..85c6727 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestResultPolicyDUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/InterestResultPolicyDUnitTest.java
@@ -36,7 +36,7 @@ import com.gemstone.gemfire.internal.AvailablePort;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.IgnoredException;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Host;
import com.gemstone.gemfire.test.dunit.VM;
import com.gemstone.gemfire.test.dunit.Wait;
@@ -128,7 +128,7 @@ public class InterestResultPolicyDUnitTest extends DistributedTestCase
objArr[0] = InterestResultPolicy.NONE;
objArr[1] = new Integer(PREPOPULATED_ENTRIES);
vm1.invoke(InterestResultPolicyDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT)});
+ NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT)});
vm1.invoke(InterestResultPolicyDUnitTest.class, "registerInterest", objArr);
vm1.invoke(InterestResultPolicyDUnitTest.class, "verifyResult", objArr);
logger.fine("testPolicyNone END");
@@ -150,7 +150,7 @@ public class InterestResultPolicyDUnitTest extends DistributedTestCase
objArr[0] = InterestResultPolicy.KEYS;
objArr[1] = new Integer(PREPOPULATED_ENTRIES);
vm1.invoke(InterestResultPolicyDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT)});;
+ NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT)});;
vm1.invoke(InterestResultPolicyDUnitTest.class, "registerInterest", objArr);
vm1.invoke(InterestResultPolicyDUnitTest.class, "verifyResult", objArr);
logger.fine("testPolicyKeys END");
@@ -172,7 +172,7 @@ public class InterestResultPolicyDUnitTest extends DistributedTestCase
objArr[0] = InterestResultPolicy.KEYS_VALUES;
objArr[1] = new Integer(PREPOPULATED_ENTRIES);
vm1.invoke(InterestResultPolicyDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT)});
+ NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT)});
vm1.invoke(InterestResultPolicyDUnitTest.class, "registerInterest", objArr);
vm1.invoke(InterestResultPolicyDUnitTest.class, "verifyResult", objArr);
logger.fine("testPolicyKeyValues END");
@@ -197,7 +197,7 @@ public class InterestResultPolicyDUnitTest extends DistributedTestCase
/* registering for 5 extra keys */
objArr[1] = new Integer(PREPOPULATED_ENTRIES + 5);
vm1.invoke(InterestResultPolicyDUnitTest.class, "createClientCache", new Object[] {
- NetworkSupport.getServerHostName(Host.getHost(0)), new Integer(PORT)});
+ NetworkUtils.getServerHostName(Host.getHost(0)), new Integer(PORT)});
vm1.invoke(InterestResultPolicyDUnitTest.class, "registerInterest", objArr);
vm1.invoke(InterestResultPolicyDUnitTest.class, "verifyResult", objArr);
Integer cnt = (Integer)vm0.invoke(InterestResultPolicyDUnitTest.class,
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart1DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart1DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart1DUnitTest.java
index 5c2292e..704a82a 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart1DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart1DUnitTest.java
@@ -19,7 +19,7 @@ package com.gemstone.gemfire.internal.cache.tier.sockets;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -60,7 +60,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void testRedundancyNotSpecifiedNonPrimaryServerFail()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 0);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 0);
verifyOrderOfEndpoints();
server2.invoke(RedundancyLevelTestBase.class, "stopServer");
//pause(5000);
@@ -108,7 +108,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
//Asif: Increased the socket read timeout to 3000 sec becoz the registering
// of keys was timing out sometimes causing fail over to EP4 cozing
// below assertion to fail
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 0, 3000, 100);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 0, 3000, 100);
assertTrue(pool.getPrimaryName().equals(SERVER1));
verifyOrderOfEndpoints();
server0.invoke(RedundancyLevelTestBase.class, "stopServer");
@@ -149,7 +149,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedNonFailoverEPFails()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
//assertTrue(pool.getRedundantNames().contains(SERVER1));
@@ -188,7 +188,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
try {
FailOverDetectionByCCU = true;
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1, 250, 500);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1, 250, 500);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
// assertTrue(pool.getRedundantNames()
@@ -225,7 +225,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void _testRedundancySpecifiedNonFailoverEPFailsDetectionByRegisterInterest()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250, 500);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250, 500);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
// assertTrue(pool.getRedundantNames()
@@ -265,7 +265,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void _testRedundancySpecifiedNonFailoverEPFailsDetectionByUnregisterInterest()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250,500);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250,500);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
// assertTrue(pool.getRedundantNames()
@@ -303,7 +303,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedNonFailoverEPFailsDetectionByPut()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,500,1000);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,500,1000);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
// assertTrue(pool.getRedundantNames()
@@ -343,7 +343,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedNonPrimaryEPFails()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
assertTrue(pool.getPrimaryName().equals(SERVER1));
@@ -384,7 +384,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
try {
FailOverDetectionByCCU = true;
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1, 250, 500);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1, 250, 500);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
assertTrue(pool.getPrimaryName().equals(SERVER1));
@@ -423,7 +423,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedNonPrimaryEPFailsDetectionByRegisterInterest()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250, 500);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250, 500);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
assertTrue(pool.getPrimaryName().equals(SERVER1));
@@ -464,7 +464,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedNonPrimaryEPFailsDetectionByUnregisterInterest()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250,500);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250,500);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
assertTrue(pool.getPrimaryName().equals(SERVER1));
@@ -504,7 +504,7 @@ public class RedundancyLevelPart1DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedNonPrimaryEPFailsDetectionByPut()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250,500);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,250,500);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
assertTrue(pool.getPrimaryName().equals(SERVER1));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/c05f6798/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart2DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart2DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart2DUnitTest.java
index 5b70314..6f31d3b 100755
--- a/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart2DUnitTest.java
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/internal/cache/tier/sockets/RedundancyLevelPart2DUnitTest.java
@@ -19,7 +19,7 @@ package com.gemstone.gemfire.internal.cache.tier.sockets;
import com.gemstone.gemfire.test.dunit.Assert;
import com.gemstone.gemfire.test.dunit.DistributedTestCase;
import com.gemstone.gemfire.test.dunit.Host;
-import com.gemstone.gemfire.test.dunit.NetworkSupport;
+import com.gemstone.gemfire.test.dunit.NetworkUtils;
import com.gemstone.gemfire.test.dunit.Wait;
import com.gemstone.gemfire.test.dunit.WaitCriterion;
@@ -62,7 +62,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedPrimaryEPFails()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
waitConnectedServers(4);
assertTrue(pool.getPrimaryName().equals(SERVER1));
assertTrue(pool.getRedundantNames().contains(SERVER2));
@@ -104,7 +104,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
try {
FailOverDetectionByCCU = true;
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000,100);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000,100);
waitConnectedServers(4);
assertTrue(pool.getPrimaryName().equals(SERVER1));
assertTrue(pool.getRedundantNames().contains(SERVER2));
@@ -143,7 +143,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedPrimaryEPFailsDetectionByRegisterInterest()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000, 100);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000, 100);
waitConnectedServers(4);
assertTrue(pool.getPrimaryName().equals(SERVER1));
assertTrue(pool.getRedundantNames().contains(SERVER2));
@@ -184,7 +184,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedPrimaryEPFailsDetectionByUnregisterInterest()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000,100);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000,100);
waitConnectedServers(4);
assertTrue(pool.getPrimaryName().equals(SERVER1));
assertTrue(pool.getRedundantNames().contains(SERVER2));
@@ -224,7 +224,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedPrimaryEPFailsDetectionByPut()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000, 100);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1,3000, 100);
waitConnectedServers(4);
assertTrue(pool.getPrimaryName().equals(SERVER1));
assertTrue(pool.getRedundantNames().contains(SERVER2));
@@ -260,7 +260,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedPrimarySecondaryEPFails()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 1);
waitConnectedServers(4);
assertEquals(1, pool.getRedundantNames().size());
assertTrue(pool.getPrimaryName().equals(SERVER1));
@@ -301,7 +301,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedEPFails()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 2);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 2);
waitConnectedServers(4);
assertEquals(2, pool.getRedundantNames().size());
assertTrue(pool.getPrimaryName().equals(SERVER1));
@@ -362,7 +362,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
// make sure that the client connects to only two servers and
// redundancyLevel
// unsatisfied with one
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 2);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 2);
// let the client connect to servers
//pause(10000);
verifyLiveAndRedundantServers(2, 1);
@@ -417,7 +417,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
try {
// TODO: Yogesh
server1.invoke(RedundancyLevelTestBase.class, "stopServer");
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 2);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 2);
// let the client connect to servers
//pause(10000);
verifyLiveAndRedundantServers(3, 2);
@@ -457,7 +457,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
try {
// TODO: Yogesh
server2.invoke(RedundancyLevelTestBase.class, "stopServer");
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, -1/* not specified */);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, -1/* not specified */);
// let the client connect to servers
//pause(10000);
verifyLiveAndRedundantServers(3, 2);
@@ -522,7 +522,7 @@ public class RedundancyLevelPart2DUnitTest extends RedundancyLevelTestBase
public void testRedundancySpecifiedMoreThanEPs()
{
try {
- createClientCache(NetworkSupport.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 5);
+ createClientCache(NetworkUtils.getServerHostName(Host.getHost(0)), PORT1, PORT2, PORT3, PORT4, 5);
assertEquals(3, pool.getRedundantNames().size());
server0.invoke(RedundancyLevelTestBase.class, "verifyCCP");
server1.invoke(RedundancyLevelTestBase.class, "verifyCCP");