You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@geode.apache.org by up...@apache.org on 2016/02/22 19:35:30 UTC
[08/83] [abbrv] [partial] incubator-geode git commit: Merge
remote-tracking branch 'origin/develop' into feature/GEODE-917
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/5beaaedc/geode-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
----------------------------------------------------------------------
diff --cc geode-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
index 44f7f06,0000000..cf6226d
mode 100644,000000..100644
--- a/geode-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/cache30/DistAckMapMethodsDUnitTest.java
@@@ -1,707 -1,0 +1,705 @@@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * DistAckMapMethodsDUnitTest.java
+ *
+ * Created on August 4, 2005, 12:36 PM
+ */
+package com.gemstone.gemfire.cache30;
+
+import java.util.HashSet;
+import java.util.Properties;
+import java.util.Set;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheException;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.CacheListener;
+import com.gemstone.gemfire.cache.CacheWriter;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.RegionDestroyedException;
+import com.gemstone.gemfire.cache.Scope;
+import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
+import com.gemstone.gemfire.cache.util.CacheWriterAdapter;
+import com.gemstone.gemfire.distributed.DistributedSystem;
+import com.gemstone.gemfire.test.dunit.DistributedTestCase;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.Invoke;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.SerializableRunnable;
+import com.gemstone.gemfire.test.dunit.VM;
+
+/**
+ *
+ * @author prafulla
+ */
+public class DistAckMapMethodsDUnitTest extends DistributedTestCase{
+ static Cache cache;
+ static Properties props = new Properties();
+ static DistributedSystem ds = null;
+ static Region region;
+ static Region mirroredRegion;
+ static Region remRegion;
+ static boolean afterDestroy=false;
+
+
+ //helper class reference objects
+ static Object afterDestroyObj;
+
+ /** Creates a new instance of DistAckMapMethodsDUnitTest */
+ public DistAckMapMethodsDUnitTest(String name) {
+ super(name);
+ }
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
- vm0.invoke(DistAckMapMethodsDUnitTest.class, "createCache");
- vm1.invoke(DistAckMapMethodsDUnitTest.class, "createCache");
++ vm0.invoke(() -> DistAckMapMethodsDUnitTest.createCache());
++ vm1.invoke(() -> DistAckMapMethodsDUnitTest.createCache());
+ }
+
+ @Override
+ protected final void preTearDown() throws Exception {
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
- vm0.invoke(DistAckMapMethodsDUnitTest.class, "closeCache");
- vm1.invoke(DistAckMapMethodsDUnitTest.class, "closeCache");
++ vm0.invoke(() -> DistAckMapMethodsDUnitTest.closeCache());
++ vm1.invoke(() -> DistAckMapMethodsDUnitTest.closeCache());
+ cache = null;
+ Invoke.invokeInEveryVM(new SerializableRunnable() { public void run() { cache = null; } });
+ }
+
+ public static void createCache(){
+ try{
+ //props.setProperty("mcast-port", "1234");
+ //ds = DistributedSystem.connect(props);
+ ds = (new DistAckMapMethodsDUnitTest("temp")).getSystem(props);
+ cache = CacheFactory.create(ds);
+ AttributesFactory factory = new AttributesFactory();
+ factory.setScope(Scope.DISTRIBUTED_ACK);
+ RegionAttributes attr = factory.create();
+ region = cache.createRegion("map", attr);
+ } catch (Exception ex){
+ ex.printStackTrace();
+ }
+ }
+
+ public static void closeCache(){
+ try{
+ cache.close();
+ ds.disconnect();
+ } catch (Exception ex){
+ ex.printStackTrace();
+ }
+
+ }
+
+ public static void createMirroredRegion(){
+ try{
+ AttributesFactory factory1 = new AttributesFactory();
+ factory1.setScope(Scope.DISTRIBUTED_ACK);
+ factory1.setDataPolicy(DataPolicy.REPLICATE);
+ RegionAttributes attr1 = factory1.create();
+ mirroredRegion = cache.createRegion("mirrored", attr1);
+
+ } catch (Exception ex){
+ ex.printStackTrace();
+ }
+ }
+
+ public static void createRegionToTestRemove(){
+ try{
+ AttributesFactory factory2 = new AttributesFactory();
+ factory2.setScope(Scope.DISTRIBUTED_ACK);
+ CacheWriter cacheWriter = new RemoveCacheWriter();
+ CacheListener cacheListener = new RemoveCacheListener();
+ factory2.setCacheWriter(cacheWriter);
+ factory2.setCacheListener(cacheListener);
+ RegionAttributes attr2 = factory2.create();
+ remRegion = cache.createRegion("remove", attr2);
+
+ } catch (Exception ex){
+ ex.printStackTrace();
+ }
+ }
+
+ //testMethods
+
+ public void testPutMethod(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+ Object obj1;
+ //put from one and get from other
+ int i=1;
+ Object[] objArr = new Object[1];
+ objArr[0] = ""+i;
+ //Integer in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
+ obj1 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);
+ if(obj1 == null ){
+ fail("region.put(key, value) from one vm does not match with region.get(key) from other vm");
+ }
+
+ //put from both vms for same key
+ i = 2;
+ objArr[0] = ""+i;
+ //in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
+ obj1 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
+ if(obj1 != null){//here if some dummy object is returned on first time put then that should be checked
+ fail("failed while region.put from both vms for same key");
+ }
+ }
+
+ public void testRemoveMethod(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+ Object obj1, obj2;
+ boolean ret;
+ //put from one and get from other
+ int i=1;
+ Object objArr[] = new Object[1];
+ objArr[0] = ""+i;
+ //Integer in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "removeMethod", objArr);
+ //validate if vm0 has that key value entry
- ret = vm0.invokeBoolean(DistAckMapMethodsDUnitTest.class, "containsKeyMethod", objArr);
++ ret = vm0.invoke(() -> containsKeyMethod("" + i));
+ if( ret ){//if returned true means that the key is still there
+ fail("region.remove failed with distributed ack scope");
+ }
+
+ //test if the correct value is returned
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
+ obj1 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);//to make sure that vm1 region has the entry
+ obj2 = vm1.invoke(DistAckMapMethodsDUnitTest.class, "removeMethod", objArr);
+ LogWriterUtils.getLogWriter().fine("111111111"+obj1);
+ LogWriterUtils.getLogWriter().fine("2222222222"+obj2);
+ if (obj1 == null)
+ fail("region1.getMethod returned null");
+ if(!(obj1.equals(obj2))){
+ fail("region.remove failed with distributed ack scope");
+ }
+ }
+
+ public void testRemoveMethodDetails(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+
- vm0.invoke(DistAckMapMethodsDUnitTest.class, "createRegionToTestRemove");
- vm1.invoke(DistAckMapMethodsDUnitTest.class, "createRegionToTestRemove");
++ vm0.invoke(() -> DistAckMapMethodsDUnitTest.createRegionToTestRemove());
++ vm1.invoke(() -> DistAckMapMethodsDUnitTest.createRegionToTestRemove());
+
- vm0.invoke(DistAckMapMethodsDUnitTest.class, "removeMethodDetails");
++ vm0.invoke(() -> DistAckMapMethodsDUnitTest.removeMethodDetails());
+ vm1.invoke(new CacheSerializableRunnable("testRemoveMethodDetails"){
+ public void run2() throws CacheException {
+ Object ob1 = remRegion.get(new Integer(1));
+ assertEquals("beforeDestroy", ob1.toString());
+ //wait till listener switches afterDestroy to true
+ // while(!afterDestroy){
+ // //wait
+ // }
+ assertEquals("afterDestroy", remRegion.get(new Integer(3)).toString());
+ }
+ }
+ );
+ }//end of testRemoveMethodDetails
+
+ public void testIsEmptyMethod(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+// boolean ret;
+ //put from one and get from other
+ int i=1;
+ Object objArr[] = new Object[1];
+ objArr[0] = ""+i;
+ //Integer in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
- boolean val = vm1.invokeBoolean(DistAckMapMethodsDUnitTest.class, "isEmptyMethod");
++ boolean val = vm1.invoke(() -> DistAckMapMethodsDUnitTest.isEmptyMethod());
+ if (!val){//val should be true
+ fail("Failed in region.isEmpty");
+ }
+
+ vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);
- boolean val1 = vm1.invokeBoolean(DistAckMapMethodsDUnitTest.class, "isEmptyMethod");
++ boolean val1 = vm1.invoke(() -> DistAckMapMethodsDUnitTest.isEmptyMethod());
+ if (val1){
+ fail("Failed in region.isEmpty");
+ }
+ }
+
+ public void testContainsValueMethod(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+// boolean ret;
+ //put from one and get from other
+ int i=1;
+ Object objArr[] = new Object[1];
+ objArr[0] = ""+i;
+ //Integer in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
- Object ob[] = new Object[1];
- ob[0] = "first";
- boolean val = vm1.invokeBoolean(DistAckMapMethodsDUnitTest.class, "containsValueMethod", ob);
++ boolean val = vm1.invoke(() -> containsValueMethod("first"));
+ if (val){//val should be false.
+ fail("Failed in region.ContainsValue");
+ }
+
+ vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);
- boolean val1 = vm1.invokeBoolean(DistAckMapMethodsDUnitTest.class, "containsValueMethod", ob);
++ boolean val1 = vm1.invoke(() -> containsValueMethod("first"));
+ if (!val1){//val1 should be true.
+ fail("Failed in region.ContainsValue");
+ }
+ }
+
+ public void testKeySetMethod(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+
+ int i=1;
+ Object objArr[] = new Object[1];
+ objArr[0] = ""+i;
+ //Integer in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
- int temp = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "keySetMethod");
++ int temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.keySetMethod());
+ if (temp != 0){
+ fail("failed in keySetMethodtest method");
+ }
+
+ vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);//to make sure that vm1 region has the entry
- temp = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "keySetMethod");
++ temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.keySetMethod());
+ if (temp == 0){
+ fail("failed in keySetMethodtest method");
+ }
+ //in the above scenario we can test this for the mirrored region scenario as well
+ temp=0;
- vm0.invoke(DistAckMapMethodsDUnitTest.class, "createMirroredRegion");
- vm1.invoke(DistAckMapMethodsDUnitTest.class, "createMirroredRegion");
++ vm0.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
++ vm1.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
- temp = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "keySetMethod");
++ temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.keySetMethod());
+ if (temp == 0){
+ fail("failed in keySetMethodtest method");
+ }
+ }
+
+
+ public void testEntrySetMethod(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+
+ int i=1;
+ Object objArr[] = new Object[1];
+ objArr[0] = ""+i;
+ //Integer in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
- int temp = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "entrySetMethod");
++ int temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.entrySetMethod());
+ if (temp != 0){
+ fail("failed in entrySetMethodtest method");
+ }
+
+ vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);//to make sure that vm1 region has the entry
- temp = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "entrySetMethod");
++ temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.entrySetMethod());
+ if (temp == 0){
+ fail("failed in entrySetMethodtest method");
+ }
+ //in the above scenario we can test this for the mirrored region scenario as well
+ temp=0;
- vm0.invoke(DistAckMapMethodsDUnitTest.class, "createMirroredRegion");
- vm1.invoke(DistAckMapMethodsDUnitTest.class, "createMirroredRegion");
++ vm0.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
++ vm1.invoke(() -> DistAckMapMethodsDUnitTest.createMirroredRegion());
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putOnMirroredRegion", objArr);
- temp = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "entrySetMethod");
++ temp = vm1.invoke(() -> DistAckMapMethodsDUnitTest.entrySetMethod());
+ if (temp == 0){
+ fail("failed in entrySetMethodtest method");
+ }
+ }
+
+ public void testSizeMethod(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+
+ int i=1, j=0;
+ Object objArr[] = new Object[1];
+ objArr[0] = ""+i;
+ //Integer in = new Integer(i);
+ //objArr[0] = (Object) in;
+ vm0.invoke(DistAckMapMethodsDUnitTest.class, "putMethod", objArr);
- j = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "sizeMethod");
++ j = vm1.invoke(() -> DistAckMapMethodsDUnitTest.sizeMethod());
+ if( j != 0){
+ fail("failed in region.size method");
+ }
+
+ vm1.invoke(DistAckMapMethodsDUnitTest.class, "getMethod", objArr);//to make sure that vm1 region has the entry
- j = vm1.invokeInt(DistAckMapMethodsDUnitTest.class, "sizeMethod");
++ j = vm1.invoke(() -> DistAckMapMethodsDUnitTest.sizeMethod());
+ if( j == 0){
+ fail("failed in region.size method");
+ }
+ }
+
+ public void testallMethodsArgs(){
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
- vm0.invoke(DistAckMapMethodsDUnitTest.class, "allMethodsArgs");
++ vm0.invoke(() -> DistAckMapMethodsDUnitTest.allMethodsArgs());
+ }
+
+
+ //following is the implementation of the methods of Map to use in dunit test cases.
+ /*
+ *
+ *
+ */
+
+ public static Object putMethod(Object ob){
+ Object obj=null;
+ try{
+ if(ob != null){
+ String str = "first";
+ obj = region.put(ob, str);
+ }
+ }catch(Exception ex){
+ fail("Failed while region.put");
+ }
+ return obj;
+ }
+
+ public static Object getMethod(Object ob){
+ Object obj=null;
+ try{
+ obj = region.get(ob);
+
+ } catch(Exception ex){
+ fail("Failed while region.get");
+ }
+ return obj;
+ }
+
+ public static Object removeMethod(Object ob){
+ Object obj=null;
+ try{
+ obj = region.remove(ob);
+ }catch(Exception ex){
+ ex.printStackTrace();
+ fail("Failed while region.remove");
+ }
+ return obj;
+ }
+
+ public static boolean containsKeyMethod(Object ob){
+ boolean flag = false;
+ try{
+ flag = region.containsKey(ob);
+ }catch(Exception ex){
+ fail("Failed while region.containsKey");
+ }
+ return flag;
+ }
+
+ public static boolean isEmptyMethod(){
+ boolean flag = false;
+ try{
+ flag = region.isEmpty();
+ }catch(Exception ex){
+ fail("Failed while region.isEmpty");
+ }
+ return flag;
+ }
+
+ public static boolean containsValueMethod(Object ob){
+ boolean flag = false;
+ try{
+ flag = region.containsValue(ob);
+ }catch(Exception ex){
+ fail("Failed while region.containsValueMethod");
+ }
+ return flag;
+ }
+
+ public static int keySetMethod(){
+ Set set = new HashSet();
+ int i=0;
+ try{
+ set = region.keySet();
+ i = set.size();
+ }catch(Exception ex){
+ ex.printStackTrace();
+ fail("Failed while region.keySet");
+ }
+ return i;
+ }
+
+ public static int entrySetMethod(){
+ Set set = new HashSet();
+ int i=0;
+ try{
+ set = region.entrySet();
+ i = set.size();
+ }catch(Exception ex){
+ ex.printStackTrace();
+ fail("Failed while region.entrySet");
+ }
+ return i;
+ }
+
+ public static int sizeMethod(){
+ int i=0;
+ try{
+ i = region.size();
+ }catch(Exception ex){
+ fail("Failed while region.size");
+ }
+ return i;
+ }
+
+ //following are methods for put on and get from mirrored regions
+
+ public static Object putOnMirroredRegion(Object ob){
+ Object obj=null;
+ try{
+ String str = "mirror";
+ obj = mirroredRegion.put(ob, str);
+ }catch(Exception ex){
+ ex.printStackTrace();
+ fail("Failed while mirroredRegion.put");
+ }
+ return obj;
+ }
+
+ public static Object getFromMirroredRegion(Object ob){
+ Object obj=null;
+ try{
+ obj = mirroredRegion.get(ob);
+
+ } catch(Exception ex){
+ fail("Failed while mirroredRegion.get");
+ }
+ return obj;
+ }
+
+ public static void removeMethodDetails(){
+ Object ob1;
+// Object ob2;
+ Integer inOb1 = new Integer(1);
+ try{
+ region.put(inOb1, "first");
+ ob1 = region.remove(inOb1);
+ assertEquals("first", ob1.toString());
+ }catch(Exception ex){
+ ex.printStackTrace();
+ }
+
+ //to test EntryNotFoundException
+ try{
+ region.remove(new Integer(2));
+ //fail("Should have thrown EntryNotFoundException");
+ }//catch (EntryNotFoundException e){
+ catch (Exception e){
+ //pass
+ //e.printStackTrace();
+ }
+
+ //to test NullPointerException
+ try{
+ Integer inOb2 = new Integer(2);
+ region.put(inOb2, "second");
+ inOb2 = null;
+ region.remove(inOb2);
+ fail("Should have thrown NullPointerException ");
+ }catch (NullPointerException e){
+ //pass
+ }
+
+ //to test the cache writers and listeners
+ try {
+ //createRegionToTestRemove();
+ Integer inOb2 = new Integer(2);
+ remRegion.put(inOb2, "second");
+ remRegion.remove(inOb2);
+
+ //to test cacheWriter
+ inOb2 = new Integer(1);
+ assertEquals("beforeDestroy", remRegion.get(inOb2).toString());
+
+ //wait till listener switches afterDestroy to true
+ while(!afterDestroy){
+ }
+ //to test cacheListener
+ inOb2 = new Integer(3);
+ assertEquals("afterDestroy", remRegion.get(inOb2).toString());
+
+ //verify that entryEventvalue is correct for listener
+ assertNotNull(afterDestroyObj);
+
+ }catch (Exception ex){
+ ex.printStackTrace();
+ }
+
+
+ }//end of removeMethodDetail
+
+ public static void allMethodsArgs(){
+ //testing args for put method
+ try{
+ region.put(new Integer(1), new String("first"));
+ region.put(new Integer(2), new String("second"));
+ region.put(new Integer(3), new String("third"));
+
+ //test args for get method
+ Object ob1 = region.get(new Integer(1));
+ assertEquals("first", ob1.toString());
+
+ //test args for containsKey method
+ boolean val1 = region.containsKey(new Integer(2));
+ assertEquals(true, val1);
+
+ //test args for containsKey method
+ boolean val2 = region.containsValue(new String("second"));
+ //assertEquals(true, val2);
+
+ //test args for remove method
+ try{
+ region.remove(new Integer(3));
+ }//catch (EntryNotFoundException ex){
+ catch (Exception ex){
+ ex.printStackTrace();
+ fail("failed while region.remove(new Object())");
+ }
+
+ //verifying the correct exceptions are thrown by the methods
+
+ Object key=null, value=null;
+ //testing put method
+ try{
+ region.put(key, value);
+ fail("should have thrown NullPointerException");
+ }catch(NullPointerException iex){
+ //pass
+ }
+
+ //testing containsValue method
+ try{
+ region.containsValue(value);
+ fail("should have thrown NullPointerException");
+ }catch(NullPointerException iex){
+ //pass
+ }
+
+ //RegionDestroyedException
+ key = new Integer(5);
+ value = new String("fifth");
+
+ region.localDestroyRegion();
+ //test put method
+ try{
+ region.put(key, value);
+ fail("should have thrown RegionDestroyedException");
+ }catch(RegionDestroyedException iex){
+ //pass
+ }
+
+ //test remove method
+ try{
+ region.remove(key);
+ fail("should have thrown RegionDestroyedException");
+ }catch(RegionDestroyedException iex){
+ //pass
+ }
+
+ //test containsValue method
+ try{
+ region.containsValue(value);
+ fail("should have thrown RegionDestroyedException");
+ }catch(RegionDestroyedException iex){
+ //pass
+ }
+
+ //test size method
+ try{
+ region.size();
+ fail("should have thrown RegionDestroyedException");
+ }catch(RegionDestroyedException iex){
+ //pass
+ }
+
+ //test keySet method
+ try{
+ region.keySet();
+ fail("should have thrown RegionDestroyedException");
+ }catch(RegionDestroyedException iex){
+ //pass
+ }
+
+ //test entrySet method
+ try{
+ region.entrySet();
+ fail("should have thrown RegionDestroyedException");
+ }catch(RegionDestroyedException iex){
+ //pass
+ }
+
+
+ } catch(Exception ex){
+ ex.printStackTrace();
+ }
+
+ }//end of allMethodsArgs
+
+ //helper classes
+
+ static class RemoveCacheWriter extends CacheWriterAdapter{
+
+ public void beforeDestroy(EntryEvent entryEvent) throws com.gemstone.gemfire.cache.CacheWriterException {
+ Integer o1 = new Integer(1);
+ remRegion.put(o1, "beforeDestroy");
+ }
+
+ }//end of RemoveCacheWriter
+
+
+ static class RemoveCacheListener extends CacheListenerAdapter{
+
+ public void afterDestroy(EntryEvent entryEvent) throws com.gemstone.gemfire.cache.CacheWriterException {
+ Integer o1 = new Integer(3);
+ remRegion.put(o1, "afterDestroy");
+
+ afterDestroyObj = entryEvent.getKey();
+
+ //to continue main thread where region.remove has actually occurred
+ afterDestroy = true;
+ }
+
+ }//end of RemoveCacheListener
+
+
+}//end of class
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/5beaaedc/geode-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
----------------------------------------------------------------------
diff --cc geode-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
index a1cc2cd,0000000..f6836ae
mode 100644,000000..100644
--- a/geode-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/cache30/DistributedNoAckRegionCCEDUnitTest.java
@@@ -1,586 -1,0 +1,586 @@@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.cache30;
+
+import java.util.Map;
+import java.util.Properties;
+
+import com.gemstone.gemfire.cache.AttributesFactory;
+import com.gemstone.gemfire.cache.CacheException;
+import com.gemstone.gemfire.cache.CacheListener;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.EntryEvent;
+import com.gemstone.gemfire.cache.EntryNotFoundException;
+import com.gemstone.gemfire.cache.RegionAttributes;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.cache.Scope;
+import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
+import com.gemstone.gemfire.distributed.internal.DistributionConfig;
+import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
+import com.gemstone.gemfire.internal.cache.LocalRegion;
+import com.gemstone.gemfire.test.dunit.Assert;
+import com.gemstone.gemfire.test.dunit.AsyncInvocation;
+import com.gemstone.gemfire.test.dunit.Host;
+import com.gemstone.gemfire.test.dunit.LogWriterUtils;
+import com.gemstone.gemfire.test.dunit.SerializableRunnable;
+import com.gemstone.gemfire.test.dunit.VM;
+import com.gemstone.gemfire.test.dunit.Wait;
+
+public class DistributedNoAckRegionCCEDUnitTest extends
+ DistributedNoAckRegionDUnitTest {
+
+ static volatile boolean ListenerBlocking;
+
+ public DistributedNoAckRegionCCEDUnitTest(String name) {
+ super(name);
+ }
+
+ @Override
+ public Properties getDistributedSystemProperties() {
+ Properties p = super.getDistributedSystemProperties();
+ p.put(DistributionConfig.CONSERVE_SOCKETS_NAME, "false");
+ if (distributedSystemID > 0) {
+ p.put(DistributionConfig.DISTRIBUTED_SYSTEM_ID_NAME, ""+distributedSystemID);
+ }
+ p.put(DistributionConfig.SOCKET_BUFFER_SIZE_NAME, ""+2000000);
+ return p;
+ }
+
+
+ /**
+ * Returns region attributes for a <code>GLOBAL</code> region
+ */
+ protected RegionAttributes getRegionAttributes() {
+ AttributesFactory factory = new AttributesFactory();
+ factory.setScope(Scope.DISTRIBUTED_NO_ACK);
+ factory.setDataPolicy(DataPolicy.REPLICATE);
+ factory.setConcurrencyChecksEnabled(true);
+ return factory.create();
+ }
+ protected RegionAttributes getRegionAttributes(String type) {
+ RegionAttributes ra = getCache().getRegionAttributes(type);
+ if (ra == null) {
+ throw new IllegalStateException("The region shortcut " + type
+ + " has been removed.");
+ }
+ AttributesFactory factory = new AttributesFactory(ra);
+ factory.setScope(Scope.DISTRIBUTED_NO_ACK);
+ factory.setConcurrencyChecksEnabled(true);
+ return factory.create();
+ }
+
+ @Override
+ public void sendSerialMessageToAll() {
+ try {
+ com.gemstone.gemfire.distributed.internal.SerialAckedMessage msg = new com.gemstone.gemfire.distributed.internal.SerialAckedMessage();
+ msg.send(InternalDistributedSystem.getConnectedInstance().getDM().getNormalDistributionManagerIds(), false);
+ }
+ catch (Exception e) {
+ throw new RuntimeException("Unable to send serial message due to exception", e);
+ }
+ }
+
+
+ @Override
+ public void testLocalDestroy() throws InterruptedException {
+ // replicates don't allow local destroy
+ }
+
+ @Override
+ public void testEntryTtlLocalDestroy() throws InterruptedException {
+ // replicates don't allow local destroy
+ }
+
+ public void testClearWithManyEventsInFlight() throws Exception {
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+ VM vm2 = host.getVM(2);
+ VM vm3 = host.getVM(3);
+
+ // create replicated regions in VM 0 and 1, then perform concurrent ops
+ // on the same key while creating the region in VM2. Afterward make
+ // sure that all three regions are consistent
+
+ final String name = this.getUniqueName() + "-CC";
+ createRegionWithAttribute(vm0, name, false);
+ createRegionWithAttribute(vm1, name, false);
+ createRegionWithAttribute(vm2, name, false);
+ createRegionWithAttribute(vm3, name, false);
- vm0.invoke(DistributedNoAckRegionCCEDUnitTest.class, "addBlockingListener");
- vm1.invoke(DistributedNoAckRegionCCEDUnitTest.class, "addBlockingListener");
- vm2.invoke(DistributedNoAckRegionCCEDUnitTest.class, "addBlockingListener");
- AsyncInvocation vm0Ops = vm0.invokeAsync(DistributedNoAckRegionCCEDUnitTest.class, "doManyOps");
- AsyncInvocation vm1Ops = vm1.invokeAsync(DistributedNoAckRegionCCEDUnitTest.class, "doManyOps");
- AsyncInvocation vm2Ops = vm2.invokeAsync(DistributedNoAckRegionCCEDUnitTest.class, "doManyOps");
++ vm0.invoke(() -> DistributedNoAckRegionCCEDUnitTest.addBlockingListener());
++ vm1.invoke(() -> DistributedNoAckRegionCCEDUnitTest.addBlockingListener());
++ vm2.invoke(() -> DistributedNoAckRegionCCEDUnitTest.addBlockingListener());
++ AsyncInvocation vm0Ops = vm0.invokeAsync(() -> DistributedNoAckRegionCCEDUnitTest.doManyOps());
++ AsyncInvocation vm1Ops = vm1.invokeAsync(() -> DistributedNoAckRegionCCEDUnitTest.doManyOps());
++ AsyncInvocation vm2Ops = vm2.invokeAsync(() -> DistributedNoAckRegionCCEDUnitTest.doManyOps());
+ // pause to let a bunch of operations build up
+ Wait.pause(5000);
- AsyncInvocation a0 = vm3.invokeAsync(DistributedNoAckRegionCCEDUnitTest.class, "clearRegion");
- vm0.invoke(DistributedNoAckRegionCCEDUnitTest.class, "unblockListener");
- vm1.invoke(DistributedNoAckRegionCCEDUnitTest.class, "unblockListener");
- vm2.invoke(DistributedNoAckRegionCCEDUnitTest.class, "unblockListener");
++ AsyncInvocation a0 = vm3.invokeAsync(() -> DistributedNoAckRegionCCEDUnitTest.clearRegion());
++ vm0.invoke(() -> DistributedNoAckRegionCCEDUnitTest.unblockListener());
++ vm1.invoke(() -> DistributedNoAckRegionCCEDUnitTest.unblockListener());
++ vm2.invoke(() -> DistributedNoAckRegionCCEDUnitTest.unblockListener());
+ waitForAsyncProcessing(a0, "");
+ waitForAsyncProcessing(vm0Ops, "");
+ waitForAsyncProcessing(vm1Ops, "");
+ waitForAsyncProcessing(vm2Ops, "");
+
+// if (a0failed && a1failed) {
+// fail("neither member saw event conflation - check stats for " + name);
+// }
+ Wait.pause(2000);//this test has with noack, thus we should wait before validating entries
+ // check consistency of the regions
- Map r0Contents = (Map)vm0.invoke(this.getClass(), "getCCRegionContents");
- Map r1Contents = (Map)vm1.invoke(this.getClass(), "getCCRegionContents");
- Map r2Contents = (Map)vm2.invoke(this.getClass(), "getCCRegionContents");
- Map r3Contents = (Map)vm3.invoke(this.getClass(), "getCCRegionContents");
++ Map r0Contents = (Map)vm0.invoke(() -> this.getCCRegionContents());
++ Map r1Contents = (Map)vm1.invoke(() -> this.getCCRegionContents());
++ Map r2Contents = (Map)vm2.invoke(() -> this.getCCRegionContents());
++ Map r3Contents = (Map)vm3.invoke(() -> this.getCCRegionContents());
+
+ for (int i=0; i<10; i++) {
+ String key = "cckey" + i;
+ assertEquals("region contents are not consistent", r0Contents.get(key), r1Contents.get(key));
+ assertEquals("region contents are not consistent", r1Contents.get(key), r2Contents.get(key));
+ assertEquals("region contents are not consistent", r2Contents.get(key), r3Contents.get(key));
+ for (int subi=1; subi<3; subi++) {
+ String subkey = key + "-" + subi;
+ assertEquals("region contents are not consistent", r0Contents.get(subkey), r1Contents.get(subkey));
+ assertEquals("region contents are not consistent", r1Contents.get(subkey), r2Contents.get(subkey));
+ assertEquals("region contents are not consistent", r2Contents.get(subkey), r3Contents.get(subkey));
+ }
+ }
+ }
+
+ static void addBlockingListener() {
+ ListenerBlocking = true;
+ CCRegion.getAttributesMutator().addCacheListener(new CacheListenerAdapter(){
+ public void afterCreate(EntryEvent event) {
+ onEvent(event);
+ }
+ private void onEvent(EntryEvent event) {
+ boolean blocked = false;
+ if (event.isOriginRemote()) {
+ synchronized(this) {
+ while (ListenerBlocking) {
+ LogWriterUtils.getLogWriter().info("blocking cache operations for " + event.getDistributedMember());
+ blocked = true;
+ try {
+ wait();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ LogWriterUtils.getLogWriter().info("blocking cache listener interrupted");
+ return;
+ }
+ }
+ }
+ if (blocked) {
+ LogWriterUtils.getLogWriter().info("allowing cache operations for " + event.getDistributedMember());
+ }
+ }
+ }
+ @Override
+ public void close() {
+ LogWriterUtils.getLogWriter().info("closing blocking listener");
+ ListenerBlocking = false;
+ synchronized(this) {
+ notifyAll();
+ }
+ }
+ @Override
+ public void afterUpdate(EntryEvent event) {
+ onEvent(event);
+ }
+ @Override
+ public void afterInvalidate(EntryEvent event) {
+ onEvent(event);
+ }
+ @Override
+ public void afterDestroy(EntryEvent event) {
+ onEvent(event);
+ }
+ });
+ }
+
+ static void doManyOps() {
+ // do not include putAll, which requires an Ack to detect failures
+ doOpsLoopNoFlush(5000, false, false);
+ }
+
+ static void unblockListener() {
+ CacheListener listener = CCRegion.getCacheListener();
+ ListenerBlocking = false;
+ synchronized(listener) {
+ listener.notifyAll();
+ }
+ }
+
+ static void clearRegion() {
+ CCRegion.clear();
+ }
+
+ /**
+ * This test creates a server cache in vm0 and a peer cache in vm1.
+ * It then tests to see if GII transferred tombstones to vm1 like it's supposed to.
+ * A client cache is created in vm2 and the same sort of check is performed
+ * for register-interest.
+ */
+
+ public void testGIISendsTombstones() throws Exception {
+ versionTestGIISendsTombstones();
+ }
+
+
+ protected void do_version_recovery_if_necessary(final VM vm0, final VM vm1, final VM vm2, final Object[] params) {
+ // do nothing here
+ }
+
+ /**
+ * This tests the concurrency versioning system to ensure that event conflation
+ * happens correctly and that the statistic is being updated properly
+ */
+ public void testConcurrentEvents() throws Exception {
+ versionTestConcurrentEvents();
+ }
+
+
+ public void testClearWithConcurrentEvents() throws Exception {
+ // need to figure out how to flush clear() ops for verification steps
+ }
+
+ public void testClearWithConcurrentEventsAsync() throws Exception {
+ // need to figure out how to flush clear() ops for verification steps
+ }
+
+ public void testClearOnNonReplicateWithConcurrentEvents() throws Exception {
+ // need to figure out how to flush clear() ops for verification steps
+ }
+
+
+ public void testTombstones() throws Exception {
+// for (int i=0; i<1000; i++) {
+// System.out.println("starting run #"+i);
+ versionTestTombstones();
+// if (i < 999) {
+// tearDown();
+// setUp();
+// }
+// }
+ }
+
+
+
+
+
+
+
+ public void testOneHopKnownIssues() {
+ Host host = Host.getHost(0);
+ VM vm0 = host.getVM(0);
+ VM vm1 = host.getVM(1);
+ VM vm2 = host.getVM(2);
+ VM vm3 = host.getVM(3); // this VM, but treat as a remote for uniformity
+
+ // create an empty region in vm0 and replicated regions in VM 1 and 3,
+ // then perform concurrent ops
+ // on the same key while creating the region in VM2. Afterward make
+ // sure that all three regions are consistent
+
+ final String name = this.getUniqueName() + "-CC";
+ SerializableRunnable createRegion = new SerializableRunnable("Create Region") {
+ public void run() {
+ try {
+ final RegionFactory f;
+ int vmNumber = VM.getCurrentVMNum();
+ switch (vmNumber) {
+ case 0:
+ f = getCache().createRegionFactory(getRegionAttributes(RegionShortcut.REPLICATE_PROXY.toString()));
+ break;
+ case 1:
+ f = getCache().createRegionFactory(getRegionAttributes(RegionShortcut.REPLICATE.toString()));
+ f.setDataPolicy(DataPolicy.NORMAL);
+ break;
+ default:
+ f = getCache().createRegionFactory(getRegionAttributes());
+ break;
+ }
+ CCRegion = (LocalRegion)f.create(name);
+ } catch (CacheException ex) {
+ Assert.fail("While creating region", ex);
+ }
+ }
+ };
+
+ vm0.invoke(createRegion); // empty
+ vm1.invoke(createRegion); // normal
+ vm2.invoke(createRegion); // replicate
+
+ // case 1: entry already invalid on vm2 (replicate) is invalidated by vm0 (empty)
+ final String invalidationKey = "invalidationKey";
+ final String destroyKey = "destroyKey";
+ SerializableRunnable test = new SerializableRunnable("case 1: second invalidation not applied or distributed") {
+ public void run() {
+ CCRegion.put(invalidationKey, "initialValue");
+
+ int invalidationCount = CCRegion.getCachePerfStats().getInvalidates();
+ CCRegion.invalidate(invalidationKey);
+ CCRegion.invalidate(invalidationKey);
+ assertEquals(invalidationCount+1, CCRegion.getCachePerfStats().getInvalidates());
+
+ // also test destroy() while we're at it. It should throw an exception
+ int destroyCount = CCRegion.getCachePerfStats().getDestroys();
+ CCRegion.destroy(invalidationKey);
+ try {
+ CCRegion.destroy(invalidationKey);
+ fail("expected an EntryNotFoundException");
+ } catch (EntryNotFoundException e) {
+ // expected
+ }
+ assertEquals(destroyCount+1, CCRegion.getCachePerfStats().getDestroys());
+ }
+ };
+ vm0.invoke(test);
+
+ // now do the same with the datapolicy=normal region
+ test.setName("case 2: second invalidation not applied or distributed");
+ vm1.invoke(test);
+ }
+
+ /**
+ * This tests the concurrency versioning system to ensure that event conflation
+ * happens correctly and that the statistic is being updated properly
+ */
+ public void testConcurrentEventsOnEmptyRegion() {
+ versionTestConcurrentEventsOnEmptyRegion();
+ }
+
+
+
+
+ /**
+ * This tests the concurrency versioning system to ensure that event conflation
+ * happens correctly and that the statistic is being updated properly
+ */
+ public void testConcurrentEventsOnNonReplicatedRegion() {
+ versionTestConcurrentEventsOnNonReplicatedRegion();
+ }
+
+
+ public void testGetAllWithVersions() {
+ versionTestGetAllWithVersions();
+ }
+
+
+
+ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ // these methods can be uncommented to inhibit test execution
+ // when new tests are added
+ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+// @Override
+// public void testNonblockingGetInitialImage() throws Throwable {
+// }
+// @Override
+// public void testConcurrentOperations() throws Exception {
+// }
+//
+// @Override
+// public void testDistributedUpdate() {
+// }
+//
+// @Override
+// public void testDistributedGet() {
+// }
+//
+// @Override
+// public void testDistributedPutNoUpdate() throws InterruptedException {
+// }
+//
+// @Override
+// public void testDefinedEntryUpdated() {
+// }
+//
+// @Override
+// public void testDistributedDestroy() throws InterruptedException {
+// }
+//
+// @Override
+// public void testDistributedRegionDestroy() throws InterruptedException {
+// }
+//
+// @Override
+// public void testLocalRegionDestroy() throws InterruptedException {
+// }
+//
+// @Override
+// public void testDistributedInvalidate() {
+// }
+//
+// @Override
+// public void testDistributedInvalidate4() throws InterruptedException {
+// }
+//
+// @Override
+// public void testDistributedRegionInvalidate() throws InterruptedException {
+// }
+//
+// @Override
+// public void testRemoteCacheListener() throws InterruptedException {
+// }
+//
+// @Override
+// public void testRemoteCacheListenerInSubregion() throws InterruptedException {
+// }
+//
+// @Override
+// public void testRemoteCacheLoader() throws InterruptedException {
+// }
+//
+// @Override
+// public void testRemoteCacheLoaderArg() throws InterruptedException {
+// }
+//
+// @Override
+// public void testRemoteCacheLoaderException() throws InterruptedException {
+// }
+//
+// @Override
+// public void testCacheLoaderWithNetSearch() throws CacheException {
+// }
+//
+// @Override
+// public void testCacheLoaderWithNetLoad() throws CacheException {
+// }
+//
+// @Override
+// public void testNoRemoteCacheLoader() throws InterruptedException {
+// }
+//
+// @Override
+// public void testNoLoaderWithInvalidEntry() {
+// }
+//
+// @Override
+// public void testRemoteCacheWriter() throws InterruptedException {
+// }
+//
+// @Override
+// public void testLocalAndRemoteCacheWriters() throws InterruptedException {
+// }
+//
+// @Override
+// public void testCacheLoaderModifyingArgument() throws InterruptedException {
+// }
+//
+// @Override
+// public void testRemoteLoaderNetSearch() throws CacheException {
+// }
+//
+// @Override
+// public void testLocalCacheLoader() {
+// }
+//
+// @Override
+// public void testDistributedPut() throws Exception {
+// }
+//
+// @Override
+// public void testReplicate() throws InterruptedException {
+// }
+//
+// @Override
+// public void testDeltaWithReplicate() throws InterruptedException {
+// }
+//
+// @Override
+// public void testGetInitialImage() {
+// }
+//
+// @Override
+// public void testLargeGetInitialImage() {
+// }
+//
+// @Override
+// public void testMirroredDataFromNonMirrored() throws InterruptedException {
+// }
+//
+// @Override
+// public void testNoMirroredDataToNonMirrored() throws InterruptedException {
+// }
+//
+// @Override
+// public void testMirroredLocalLoad() {
+// }
+//
+// @Override
+// public void testMirroredNetLoad() {
+// }
+//
+// @Override
+// public void testNoRegionKeepAlive() throws InterruptedException {
+// }
+//
+// @Override
+// public void testNetSearchObservesTtl() throws InterruptedException {
+// }
+//
+// @Override
+// public void testNetSearchObservesIdleTime() throws InterruptedException {
+// }
+//
+// @Override
+// public void testEntryTtlDestroyEvent() throws InterruptedException {
+// }
+//
+// @Override
+// public void testUpdateResetsIdleTime() throws InterruptedException {
+// }
+// @Override
+// public void testTXNonblockingGetInitialImage() throws Throwable {
+// }
+//
+// @Override
+// public void testNBRegionInvalidationDuringGetInitialImage() throws Throwable {
+// }
+//
+// @Override
+// public void testNBRegionDestructionDuringGetInitialImage() throws Throwable {
+// }
+//
+// @Override
+// public void testNoDataSerializer() {
+// }
+//
+// @Override
+// public void testNoInstantiator() {
+// }
+//
+// @Override
+// public void testTXSimpleOps() throws Exception {
+// }
+//
+// @Override
+// public void testTXUpdateLoadNoConflict() throws Exception {
+// }
+//
+// @Override
+// public void testTXMultiRegion() throws Exception {
+// }
+//
+// @Override
+// public void testTXRmtMirror() throws Exception {
+// }
+
+
+}