You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@geode.apache.org by up...@apache.org on 2016/04/27 22:49:49 UTC
[03/25] incubator-geode git commit: GEODE-10: Reinstating HDFS
persistence code
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/9f3f10fd/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/HDFSRegionOperationsOffHeapJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/HDFSRegionOperationsOffHeapJUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/HDFSRegionOperationsOffHeapJUnitTest.java
new file mode 100644
index 0000000..421cd28
--- /dev/null
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/HDFSRegionOperationsOffHeapJUnitTest.java
@@ -0,0 +1,78 @@
+/*=========================================================================
+ * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
+ * This product is protected by U.S. and international copyright
+ * and intellectual property laws. Pivotal products are covered by
+ * one or more patents listed at http://www.pivotal.io/patents.
+ *=========================================================================
+ */
+package com.gemstone.gemfire.internal.cache;
+
+import java.util.Iterator;
+import java.util.Properties;
+
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.PartitionAttributes;
+import com.gemstone.gemfire.cache.PartitionAttributesFactory;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.internal.util.concurrent.CustomEntryConcurrentHashMap;
+import com.gemstone.gemfire.test.junit.categories.HoplogTest;
+import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
+
+/**
+ * Off-heap variant of HDFSRegionOperationsJUnitTest: creates the HDFS
+ * partitioned region with off-heap storage enabled and verifies in tearDown()
+ * that no off-heap references were leaked.
+ */
+@Category({IntegrationTest.class, HoplogTest.class})
+public class HDFSRegionOperationsOffHeapJUnitTest extends HDFSRegionOperationsJUnitTest {
+ // Enable off-heap reference tracking before any test runs (static initializer)
+ // so OffHeapTestUtil.checkOrphans() in tearDown() can detect leaked references.
+ static {
+ System.setProperty("gemfire.trackOffHeapRefCounts", "true");
+ System.setProperty("gemfire.trackOffHeapFreedRefCounts", "true");
+ }
+
+ // Empties each local bucket's backing concurrent hash map directly. Each
+ // removed entry is an OffHeapRegionEntry whose off-heap memory must be
+ // release()d explicitly, otherwise the orphan check in tearDown() would fail.
+ @Override
+ protected void clearBackingCHM(Region<Integer, String> r) {
+ PartitionedRegion pr = (PartitionedRegion)r;
+ for (BucketRegion br : pr.getDataStore().getAllLocalBucketRegions()) {
+ assertTrue(br.getRegionMap() instanceof HDFSRegionMap);
+ CustomEntryConcurrentHashMap chm = ((AbstractRegionMap)br.getRegionMap())._getMap();
+ Iterator it = chm.keySet().iterator();
+ while (it.hasNext()) {
+ Object key = it.next();
+ OffHeapRegionEntry re = (OffHeapRegionEntry) chm.remove(key);
+ assert re != null;
+ re.release();
+ }
+ // wait here to make sure that the queue has been flushed
+ }
+ sleep(pr.getFullPath());
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ // Fail fast on leaked off-heap references before the base class tears down.
+ OffHeapTestUtil.checkOrphans();
+ super.tearDown();
+ }
+ // Region setup mirroring the base class, but with off-heap storage enabled
+ // (10 buckets, backed by the inherited hdfsStore, HDFS queries enabled).
+ @Override
+ protected Region<Integer, String> createRegion(String regionName) {
+ RegionFactory<Integer, String> rf = cache.createRegionFactory(RegionShortcut.PARTITION_HDFS);
+ PartitionAttributes prAttr = new PartitionAttributesFactory().setTotalNumBuckets(10).create();
+ rf.setPartitionAttributes(prAttr);
+ rf.setOffHeap(true);
+ rf.setHDFSStoreName(hdfsStore.getName());
+ Region<Integer, String> r = rf.create(regionName);
+// addListener(r);
+
+ ((PartitionedRegion) r).setQueryHDFS(true);
+ return r;
+ }
+ // Reserve off-heap memory for the distributed system; needed because the
+ // test region above is created with setOffHeap(true).
+ @Override
+ protected Properties getDSProps() {
+ Properties props = super.getDSProps();
+ props.setProperty("off-heap-memory-size", "50m");
+ return props;
+ }
+
+
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/9f3f10fd/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueJUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueJUnitTest.java
index b2399fd..a7daf98 100644
--- a/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueJUnitTest.java
+++ b/geode-core/src/test/java/com/gemstone/gemfire/internal/cache/wan/parallel/ParallelGatewaySenderQueueJUnitTest.java
@@ -67,7 +67,7 @@ public class ParallelGatewaySenderQueueJUnitTest {
PartitionedRegionDataStore dataStore = mock(PartitionedRegionDataStore.class);
when(mockMetaRegion.getDataStore()).thenReturn(dataStore);
when(dataStore.getSizeOfLocalPrimaryBuckets()).thenReturn(3);
- when(metaRegionFactory.newMetataRegion(any(), any(), any(), any())).thenReturn(mockMetaRegion);
+ when(metaRegionFactory.newMetataRegion(any(), any(), any(), any(), anyBoolean())).thenReturn(mockMetaRegion);
when(cache.createVMRegion(any(), any(), any())).thenReturn(mockMetaRegion);
queue.addShadowPartitionedRegionForUserPR(mockPR("region1"));
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/9f3f10fd/geode-core/src/test/java/com/gemstone/gemfire/management/bean/stats/HDFSRegionMBeanAttributeJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/management/bean/stats/HDFSRegionMBeanAttributeJUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/management/bean/stats/HDFSRegionMBeanAttributeJUnitTest.java
new file mode 100644
index 0000000..38145d1
--- /dev/null
+++ b/geode-core/src/test/java/com/gemstone/gemfire/management/bean/stats/HDFSRegionMBeanAttributeJUnitTest.java
@@ -0,0 +1,169 @@
+/*=========================================================================
+ * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
+ * This product is protected by U.S. and international copyright
+ * and intellectual property laws. Pivotal products are covered by
+ * one or more patents listed at http://www.pivotal.io/patents.
+ *=========================================================================
+ */
+package com.gemstone.gemfire.management.bean.stats;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Set;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.hfile.BlockCache;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.Operation;
+import com.gemstone.gemfire.cache.PartitionAttributesFactory;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.cache.hdfs.HDFSStoreFactory;
+import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl;
+import com.gemstone.gemfire.cache.hdfs.internal.SortedHDFSQueuePersistedEvent;
+import com.gemstone.gemfire.cache.hdfs.internal.hoplog.HoplogConfig;
+import com.gemstone.gemfire.cache.hdfs.internal.hoplog.HoplogOrganizer;
+import com.gemstone.gemfire.internal.cache.BucketRegion;
+import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
+import com.gemstone.gemfire.internal.cache.PartitionedRegion;
+import com.gemstone.gemfire.internal.cache.execute.BucketMovedException;
+import com.gemstone.gemfire.internal.cache.persistence.soplog.HFileStoreStatistics;
+import com.gemstone.gemfire.internal.cache.persistence.soplog.SortedOplogStatistics;
+import com.gemstone.gemfire.internal.cache.versions.DiskVersionTag;
+import com.gemstone.gemfire.internal.util.BlobHelper;
+import com.gemstone.gemfire.management.ManagementService;
+import com.gemstone.gemfire.management.RegionMXBean;
+import com.gemstone.gemfire.management.internal.ManagementConstants;
+import com.gemstone.gemfire.test.junit.categories.HoplogTest;
+import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
+
+/**
+ * Test for verifying HDFS-related MBean attributes: puts data into an
+ * HDFS-backed partitioned region, flushes one bucket to the HDFS store,
+ * and checks the RegionMXBean's entry-size and disk-usage attributes.
+ * @author rishim
+ *
+ */
+@Category({IntegrationTest.class, HoplogTest.class})
+public class HDFSRegionMBeanAttributeJUnitTest extends TestCase {
+
+ public static final String HDFS_STORE_NAME = "HDFSMBeanJUnitTestStore";
+ public static final String REGION_NAME = "HDFSMBeanJUnitTest_Region";
+ // Home directory of the HDFS store; removed again in tearDown().
+ protected Path testDataDir;
+ protected Cache cache;
+
+ protected HDFSStoreFactory hsf;
+ protected HDFSStoreImpl hdfsStore;
+ protected Region<Object, Object> region;
+ SortedOplogStatistics stats;
+ HFileStoreStatistics storeStats;
+ BlockCache blockCache;
+
+ // Creates the cache, an HDFS store rooted at testDataDir, and a
+ // PARTITION_HDFS region with 10 buckets backed by that store.
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ // Permit the HDFS store to live on the local file system for this test.
+ System.setProperty(HoplogConfig.ALLOW_LOCAL_HDFS_PROP, "true");
+ testDataDir = new Path("test-case");
+
+ cache = createCache();
+
+ configureHdfsStoreFactory();
+ hdfsStore = (HDFSStoreImpl) hsf.create(HDFS_STORE_NAME);
+
+ RegionFactory<Object, Object> regionfactory = cache.createRegionFactory(RegionShortcut.PARTITION_HDFS);
+ regionfactory.setHDFSStoreName(HDFS_STORE_NAME);
+
+ // regionfactory.setCompressionCodec("Some");
+ PartitionAttributesFactory fac = new PartitionAttributesFactory();
+ fac.setTotalNumBuckets(10);
+
+ regionfactory.setPartitionAttributes(fac.create());
+ region = regionfactory.create(REGION_NAME);
+
+ }
+
+ // Points the store factory at the test's local home directory.
+ protected void configureHdfsStoreFactory() throws Exception {
+ hsf = this.cache.createHDFSStoreFactory();
+ hsf.setHomeDir(testDataDir.toString());
+ }
+
+ protected Cache createCache() {
+ CacheFactory cf = new CacheFactory().set("mcast-port", "0").set("log-level", "info");
+ cache = cf.create();
+ return cache;
+ }
+
+ // Deletes the on-disk store data, then closes the cache.
+ @Override
+ protected void tearDown() throws Exception {
+ hdfsStore.getFileSystem().delete(testDataDir, true);
+ cache.close();
+ super.tearDown();
+ }
+
+ public void testStoreUsageStats() throws Exception {
+
+ PartitionedRegion parRegion = (PartitionedRegion)region;
+
+
+ // Put 100 entries and mirror each as a TestEvent for the manual flush below.
+ ArrayList<TestEvent> items = new ArrayList<TestEvent>();
+ for (int i = 0; i < 100; i++) {
+ String key = ("key-" + (i * 100 + i));
+ String value = ("value-" + System.nanoTime());
+ parRegion.put(key, value);
+
+ items.add(new TestEvent(key, value));
+ }
+
+ //Dont want to create
+ // Flush one primary bucket's hoplog organizer so data reaches the HDFS store.
+ Set<BucketRegion> localPrimaryBucketRegions = parRegion.getDataStore().getAllLocalPrimaryBucketRegions();
+ BucketRegion flushingBucket= localPrimaryBucketRegions.iterator().next();
+ HoplogOrganizer hoplogOrganizer = getOrganizer(parRegion,flushingBucket.getId());
+ hoplogOrganizer.flush(items.iterator(), 100);
+
+ GemFireCacheImpl cache = GemFireCacheImpl.getExisting();
+ ManagementService service = ManagementService.getExistingManagementService(cache);
+ RegionMXBean bean = service.getLocalRegionMBean(region.getFullPath());
+
+
+ //assertTrue(bean.getEntryCount() == ManagementConstants.ZERO);
+ // Entry size is expected to report NOT_AVAILABLE_LONG for this region,
+ // while disk usage must be positive after the flush above.
+ assertTrue(bean.getEntrySize() == ManagementConstants.NOT_AVAILABLE_LONG);
+ assertTrue(0 < bean.getDiskUsage());
+
+ }
+
+
+ // Looks up the hoplog organizer for the given bucket; throws if the bucket
+ // is no longer hosted locally (e.g. moved by rebalancing).
+ private HoplogOrganizer getOrganizer(PartitionedRegion region, int bucketId) {
+ BucketRegion br = region.getDataStore().getLocalBucketById(bucketId);
+ if (br == null) {
+ // got rebalanced or something
+ throw new BucketMovedException("Bucket region is no longer available. BucketId: " +
+ bucketId + " RegionPath: " + region.getFullPath());
+ }
+
+ return br.getHoplogOrganizer();
+ }
+
+
+ // Serializable queue event wrapper: serializes the key into a blob and
+ // defaults the operation to PUT_IF_ABSENT.
+ public static class TestEvent extends SortedHDFSQueuePersistedEvent implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ Object key;
+
+ public TestEvent(String k, String v) throws Exception {
+ this(k, v, Operation.PUT_IF_ABSENT);
+ }
+
+ public TestEvent(String k, String v, Operation op) throws Exception {
+ super(v, op, (byte) 0x02, false, new DiskVersionTag(), BlobHelper.serializeToBlob(k), 0);
+ this.key = k;
+ }
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/9f3f10fd/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/HDFSStoreCommandsJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/HDFSStoreCommandsJUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/HDFSStoreCommandsJUnitTest.java
new file mode 100644
index 0000000..af47138
--- /dev/null
+++ b/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/HDFSStoreCommandsJUnitTest.java
@@ -0,0 +1,838 @@
+/*
+ * =========================================================================
+ * Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
+ * This product is protected by U.S. and international copyright
+ * and intellectual property laws. Pivotal products are covered by
+ * one or more patents listed at http://www.pivotal.io/patents.
+ * ========================================================================
+ */
+
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.jmock.Expectations;
+import org.jmock.Mockery;
+import org.jmock.lib.legacy.ClassImposteriser;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.execute.Execution;
+import com.gemstone.gemfire.cache.execute.FunctionInvocationTargetException;
+import com.gemstone.gemfire.cache.execute.ResultCollector;
+import com.gemstone.gemfire.cache.hdfs.HDFSStore;
+import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder;
+import com.gemstone.gemfire.distributed.DistributedMember;
+import com.gemstone.gemfire.internal.cache.execute.AbstractExecution;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.cli.Result.Status;
+import com.gemstone.gemfire.management.internal.cli.functions.AlterHDFSStoreFunction;
+import com.gemstone.gemfire.management.internal.cli.functions.CliFunctionResult;
+import com.gemstone.gemfire.management.internal.cli.functions.CreateHDFSStoreFunction;
+import com.gemstone.gemfire.management.internal.cli.functions.DescribeHDFSStoreFunction;
+import com.gemstone.gemfire.management.internal.cli.functions.DestroyHDFSStoreFunction;
+import com.gemstone.gemfire.management.internal.cli.functions.ListHDFSStoresFunction;
+import com.gemstone.gemfire.management.internal.cli.functions.ListHDFSStoresFunction.HdfsStoreDetails;
+import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
+import com.gemstone.gemfire.management.internal.cli.json.GfJsonObject;
+import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
+import com.gemstone.gemfire.management.internal.cli.result.InfoResultData;
+import com.gemstone.gemfire.management.internal.cli.result.TabularResultData;
+import com.gemstone.gemfire.management.internal.cli.util.HDFSStoreNotFoundException;
+import com.gemstone.gemfire.management.internal.cli.util.MemberNotFoundException;
+import com.gemstone.gemfire.management.internal.configuration.domain.XmlEntity;
+import com.gemstone.gemfire.test.junit.categories.HoplogTest;
+import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
+
+/**
+ * The HDFSStoreCommandsJUnitTest class is a test suite of test cases testing
+ * the contract and functionality of the HDFSStoreCommands class implementing
+ * commands in the GemFire shell (gfsh) that access and modify hdfs stores in
+ * GemFire. </p>
+ *
+ * @author Namrata Thanvi
+ * @see com.gemstone.gemfire.management.internal.cli.commands.HDFSStoreCommands
+ * @see com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder
+ * @see com.gemstone.gemfire.management.internal.cli.functions.DescribeHDFSStoreFunction
+ * @see org.jmock.Expectations
+ * @see org.jmock.Mockery
+ * @see org.jmock.lib.legacy.ClassImposteriser
+ * @see org.junit.Assert
+ * @see org.junit.Test
+ */
+@Category({IntegrationTest.class, HoplogTest.class})
+public class HDFSStoreCommandsJUnitTest {
+
+ private Mockery mockContext;
+
+ // Creates a jMock context able to impersonate classes (not just interfaces).
+ @Before
+ public void setUp() {
+ mockContext = new Mockery() {
+ {
+ setImposteriser(ClassImposteriser.INSTANCE);
+ }
+ };
+ }
+
+ // Verifies every recorded expectation was met before discarding the context.
+ @After
+ public void tearDown() {
+ mockContext.assertIsSatisfied();
+ mockContext = null;
+ }
+
+ // Happy path: the DescribeHDFSStoreFunction is executed against the member
+ // and the config holder from the result collector is returned unchanged.
+ @Test
+ public void testGetHDFSStoreDescription() {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+
+ final HDFSStoreConfigHolder expectedHdfsStoreConfigHolder = createMockHDFSStoreConfigHolder(mockContext, "hdfsStoreName",
+ "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0, 2048, true, true, true, 40,
+ 40, 40, 800);
+
+ mockContext.checking(new Expectations() {
+ {
+ // getName() returns null so the command falls back to matching by id.
+ oneOf(mockMember).getName();
+ will(returnValue(null));
+ oneOf(mockMember).getId();
+ will(returnValue(memberId));
+ oneOf(mockFunctionExecutor).withArgs(with(equal(hdfsStoreName)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(DescribeHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(expectedHdfsStoreConfigHolder)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final HDFSStoreConfigHolder actualHdfsStoreConfigHolder = commands.getHDFSStoreDescription(memberId, hdfsStoreName);
+
+ assertNotNull(actualHdfsStoreConfigHolder);
+ assertEquals(expectedHdfsStoreConfigHolder, actualHdfsStoreConfigHolder);
+ }
+
+ // The requested member id ("mockMember") does not match the only member
+ // ("testMember"), so MemberNotFoundException with the CLI-formatted
+ // message is expected.
+ @Test(expected = MemberNotFoundException.class)
+ public void testGetHDFSStoreDescriptionThrowsMemberNotFoundException() {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockMember).getName();
+ will(returnValue(null));
+ oneOf(mockMember).getId();
+ will(returnValue("testMember"));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, null);
+
+ try {
+ commands.getHDFSStoreDescription(memberId, hdfsStoreName);
+ } catch (MemberNotFoundException expected) {
+ assertEquals(CliStrings.format(CliStrings.MEMBER_NOT_FOUND_ERROR_MESSAGE, memberId), expected.getMessage());
+ throw expected;
+ }
+ }
+
+ // An HDFSStoreNotFoundException thrown by the function execution must
+ // propagate to the caller unchanged.
+ @Test(expected = HDFSStoreNotFoundException.class)
+ public void testGetHDFSStoreDescriptionThrowsResourceNotFoundException() {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockMember).getName();
+ will(returnValue(null));
+ oneOf(mockMember).getId();
+ will(returnValue(memberId));
+ oneOf(mockFunctionExecutor).withArgs(with(equal(hdfsStoreName)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(DescribeHDFSStoreFunction.class)));
+ will(throwException(new HDFSStoreNotFoundException("expected")));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ try {
+ commands.getHDFSStoreDescription(memberId, hdfsStoreName);
+ } catch (HDFSStoreNotFoundException expected) {
+ assertEquals("expected", expected.getMessage());
+ throw expected;
+ }
+ }
+
+ // A RuntimeException thrown by the function execution must propagate to
+ // the caller unchanged.
+ @Test(expected = RuntimeException.class)
+ public void testGetHDFSStoreDescriptionThrowsRuntimeException() {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockMember).getName();
+ will(returnValue(null));
+ oneOf(mockMember).getId();
+ will(returnValue(memberId));
+ oneOf(mockFunctionExecutor).withArgs(with(equal(hdfsStoreName)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(DescribeHDFSStoreFunction.class)));
+ will(throwException(new RuntimeException("expected")));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ try {
+ commands.getHDFSStoreDescription(memberId, hdfsStoreName);
+ } catch (RuntimeException expected) {
+ assertEquals("expected", expected.getMessage());
+ throw expected;
+ }
+ }
+
+ // When the function returns a result of an unexpected type, the command is
+ // expected to raise a RuntimeException carrying the CLI's
+ // unexpected-return-type message and no cause.
+ @Test(expected = RuntimeException.class)
+ public void testGetHDFSStoreDescriptionWithInvalidFunctionResultReturnType() {
+ final String hdfsStoreName = "mockHDFSStore";
+ final String memberId = "mockMember";
+
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockMember).getName();
+ will(returnValue(null));
+ oneOf(mockMember).getId();
+ will(returnValue(memberId));
+ oneOf(mockFunctionExecutor).withArgs(with(equal(hdfsStoreName)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(DescribeHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(new Object())));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ try {
+ commands.getHDFSStoreDescription(memberId, hdfsStoreName);
+ } catch (RuntimeException expected) {
+ assertEquals(CliStrings.format(CliStrings.UNEXPECTED_RETURN_TYPE_EXECUTING_COMMAND_ERROR_MESSAGE, Object.class
+ .getName(), CliStrings.DESCRIBE_HDFS_STORE), expected.getMessage());
+ assertNull(expected.getCause());
+ throw expected;
+ }
+ }
+
+ // Listing: three HdfsStoreDetails returned by ListHDFSStoresFunction (with
+ // departed members ignored) must all appear in the command's listing.
+ @Test
+ public void testGetHDFSStoreListing() {
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+
+ final DistributedMember mockDistributedMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+
+ final AbstractExecution mockFunctionExecutor = mockContext.mock(AbstractExecution.class, "Function Executor");
+
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+
+ final HDFSStoreConfigHolder expectedHdfsStoreConfigHolderOne = createMockHDFSStoreConfigHolder(mockContext, "hdfsStoreName1",
+ "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0, 2048, true, true, true, 40,
+ 40, 40, 800);
+ final HDFSStoreConfigHolder expectedHdfsStoreConfigHolderTwo = createMockHDFSStoreConfigHolder(mockContext, "hdfsStoreName2",
+ "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0, 2048, true, true, true, 40,
+ 40, 40, 800);
+ final HDFSStoreConfigHolder expectedHdfsStoreConfigHolderThree = createMockHDFSStoreConfigHolder(mockContext, "hdfsStoreName3",
+ "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0, 2048, true, true, true, 40,
+ 40, 40, 800);
+
+
+ HdfsStoreDetails d1=new HdfsStoreDetails(expectedHdfsStoreConfigHolderOne.getName(), "member1", "member1");
+ HdfsStoreDetails d2=new HdfsStoreDetails(expectedHdfsStoreConfigHolderTwo.getName(), "member2", "member2");
+ HdfsStoreDetails d3=new HdfsStoreDetails(expectedHdfsStoreConfigHolderThree.getName(), "member3", "member3");
+
+ final Set<HdfsStoreDetails> expectedHdfsStores = new HashSet<HdfsStoreDetails>();
+ expectedHdfsStores.add( d1);
+ expectedHdfsStores.add(d2 );
+ expectedHdfsStores.add(d3);
+
+ final List<Object> results = new ArrayList<Object>();
+ results.add(expectedHdfsStores);
+ mockContext.checking(new Expectations() {
+ {
+ // The command is expected to tolerate members departing mid-execution.
+ oneOf(mockFunctionExecutor).setIgnoreDepartedMembers(with(equal(true)));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(ListHDFSStoresFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(results));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockDistributedMember, mockFunctionExecutor);
+
+ final List<?> actualHdfsStores = commands.getHdfsStoreListing(commands.getNormalMembers(mockCache));
+
+ Assert.assertNotNull(actualHdfsStores);
+ Assert.assertTrue(actualHdfsStores.contains(d1));
+ Assert.assertTrue(actualHdfsStores.contains(d2));
+ Assert.assertTrue(actualHdfsStores.contains(d3));
+ }
+
+ // A RuntimeException thrown while listing stores must propagate unchanged.
+ @Test(expected = RuntimeException.class)
+ public void testGetHDFSStoreListThrowsRuntimeException() {
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockDistributedMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(ListHDFSStoresFunction.class)));
+ will(throwException(new RuntimeException("expected")));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockDistributedMember, mockFunctionExecutor);
+
+ try {
+ commands.getHdfsStoreListing(commands.getNormalMembers(mockCache));
+ } catch (RuntimeException expected) {
+ assertEquals("expected", expected.getMessage());
+ throw expected;
+ }
+ }
+
+ // A FunctionInvocationTargetException mixed into the results must be
+ // tolerated: the listing call is expected to complete without throwing.
+ // NOTE(review): the returned list is not asserted here; this test only
+ // verifies that no exception escapes.
+ @Test
+ public void testGetHDFSStoreListReturnsFunctionInvocationTargetExceptionInResults() {
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockDistributedMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final AbstractExecution mockFunctionExecutor = mockContext.mock(AbstractExecution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+
+ final HDFSStoreConfigHolder expectedHdfsStoreConfigHolder = createMockHDFSStoreConfigHolder(mockContext, "hdfsStoreName",
+ "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0, 2048, true, true, true, 40,
+ 40, 40, 800);
+
+ final List<HdfsStoreDetails> expectedHdfsStores = Arrays.asList(new HdfsStoreDetails(
+ expectedHdfsStoreConfigHolder.getName(), "member1", "member1"));
+
+ final List<Object> results = new ArrayList<Object>();
+
+ results.add(expectedHdfsStores);
+ results.add(new FunctionInvocationTargetException("expected"));
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).setIgnoreDepartedMembers(with(equal(true)));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(ListHDFSStoresFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(results));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockDistributedMember, mockFunctionExecutor);
+
+ final List<HdfsStoreDetails> actualHdfsStores = commands.getHdfsStoreListing(commands
+ .getNormalMembers(mockCache));
+
+ }
+
+ // Successful create: the tabular result must have OK status and list the
+ // member with a "Success" result cell.
+ @Test
+ public void testGetCreatedHDFSStore() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ XmlEntity xml = null;
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, xml, "Success");
+ // Need to fix the return value of this function
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(with(aNonNull(HDFSStoreConfigHolder.class)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(CreateHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.getCreatedHdfsStore(null, hdfsStoreName, "hdfs://localhost:9000", "test", null, 20,
+ 20, true, true, 100, 10000, "testStore", true, 10, true, .23F, 10, 10, 10, 10, 10);
+
+ assertNotNull(result);
+ assertEquals(Status.OK, result.getStatus());
+ // Dig the "Member"/"Result" columns out of the JSON-backed tabular data.
+ TabularResultData resultData = (TabularResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("Member"));
+ assertNotNull(jsonObject.get("Result"));
+
+ assertEquals(memberId, (((JSONArray)jsonObject.get("Member")).get(0)));
+ assertEquals("Success", (((JSONArray)jsonObject.get("Result")).get(0)));
+ }
+
+ // Create failing with a Throwable: the result must have ERROR status and
+ // the member's result cell must carry the "ERROR: <class>: <message>" text.
+ @Test
+ public void testGetCreatedHDFSStoreWithThrowable() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ RuntimeException exception = new RuntimeException("Test Exception");
+
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, exception, null);
+ // Need to fix the return value of this function
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(with(aNonNull(HDFSStoreConfigHolder.class)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(CreateHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.getCreatedHdfsStore(null, hdfsStoreName, "hdfs://localhost:9000", "test", null, 20,
+ 20, true, true, 100, 10000, "testStore", true, 10, true, .23F, 10, 10, 10, 10, 10);
+
+ assertNotNull(result);
+ assertEquals(Status.ERROR, result.getStatus());
+
+ TabularResultData resultData = (TabularResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("Member"));
+ assertNotNull(jsonObject.get("Result"));
+ assertEquals(memberId, (((JSONArray)jsonObject.get("Member")).get(0)));
+ assertEquals("ERROR: " + exception.getClass().getName() + ": " + exception.getMessage(), (((JSONArray)jsonObject
+ .get("Result")).get(0)));
+ }
+
+ /**
+  * Verifies that getCreatedHdfsStore produces an informational "Unable to
+  * create hdfs store" message when the function result signals failure.
+  * NOTE(review): despite the test name, no CacheClosedException is thrown
+  * here — the fixture only returns CliFunctionResult(memberId, false, null);
+  * confirm the intended scenario against HDFSStoreCommands.
+  */
+ @Test
+ public void testGetCreatedHDFSStoreWithCacheClosedException() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+
+ // successful=false marks the member-level failure surfaced to the user.
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, false, null);
+ // Stub the execution chain: withArgs -> execute -> getResult yields the failed result.
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(with(aNonNull(HDFSStoreConfigHolder.class)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(CreateHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.getCreatedHdfsStore(null, hdfsStoreName, "hdfs://localhost:9000", "test", null, 20,
+ 20, true, true, 100, 10000, "testStore", true, 10, true, .23F, 10, 10, 10, 10, 10);
+
+ assertNotNull(result);
+ // Failure is rendered as info text, not a table.
+ InfoResultData resultData = (InfoResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("message"));
+
+ assertEquals("Unable to create hdfs store:" + hdfsStoreName, (((JSONArray)jsonObject.get("message")).get(0)));
+ }
+
+ /**
+  * Happy-path test for getAlteredHDFSStore: a successful CliFunctionResult
+  * from AlterHDFSStoreFunction yields Status.OK and a "Success" row keyed by
+  * the member id in the tabular output.
+  */
+ @Test
+ public void testGetAlteredHDFSStore() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ XmlEntity xml = null;
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, xml, "Success");
+ // Stub the execution chain: withArgs -> execute -> getResult yields the success result.
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(with(aNonNull(HDFSStoreConfigHolder.class)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(AlterHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.getAlteredHDFSStore(null, hdfsStoreName, 100, 100, true, 100, true, 100, 100, 100,
+ 100, 100);
+
+ assertNotNull(result);
+ assertEquals(Status.OK, result.getStatus());
+ TabularResultData resultData = (TabularResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("Member"));
+ assertNotNull(jsonObject.get("Result"));
+
+ assertEquals(memberId, (((JSONArray)jsonObject.get("Member")).get(0)));
+ assertEquals("Success", (((JSONArray)jsonObject.get("Result")).get(0)));
+ }
+
+ /**
+  * Verifies that getAlteredHDFSStore reports Status.ERROR and renders the
+  * throwable's class name and message when the AlterHDFSStoreFunction result
+  * carries an exception (the "Success" message is overridden by the error).
+  */
+ @Test
+ public void testGetAlteredHDFSStoreWithThrowable() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ RuntimeException exception = new RuntimeException("Test Exception");
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, exception, "Success");
+ // Stub the execution chain: withArgs -> execute -> getResult yields the error result.
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(with(aNonNull(HDFSStoreConfigHolder.class)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(AlterHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.getAlteredHDFSStore(null, hdfsStoreName, 100, 100, true, 100, true, 100, 100, 100,
+ 100, 100);
+
+ assertNotNull(result);
+ assertEquals(Status.ERROR, result.getStatus());
+ TabularResultData resultData = (TabularResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("Member"));
+ assertNotNull(jsonObject.get("Result"));
+
+ assertEquals(memberId, (((JSONArray)jsonObject.get("Member")).get(0)));
+ assertEquals("ERROR: " + exception.getClass().getName() + ": " + exception.getMessage(), (((JSONArray)jsonObject
+ .get("Result")).get(0)));
+ }
+
+ /**
+  * Verifies that getAlteredHDFSStore produces an empty tabular "content"
+  * object when the function result signals failure.
+  * NOTE(review): despite the test name, no CacheClosedException is thrown
+  * here — the fixture only returns CliFunctionResult(memberId, false, null);
+  * confirm the intended scenario against HDFSStoreCommands.
+  */
+ @Test
+ public void testGetAlteredHDFSStoreWithCacheClosedException() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, false, null);
+ // Stub the execution chain: withArgs -> execute -> getResult yields the failed result.
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(with(aNonNull(HDFSStoreConfigHolder.class)));
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(AlterHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.getAlteredHDFSStore(null, hdfsStoreName, 100, 100, true, 100, true, 100, 100, 100,
+ 100, 100);
+
+ assertNotNull(result);
+ // No rows are added for a failed member, so "content" is an empty JSON object.
+ TabularResultData resultData = (TabularResultData)((CommandResult)result).getResultData();
+ JSONObject jsonObject = (JSONObject)resultData.getGfJsonObject().get("content");
+ assertEquals(0, jsonObject.length());
+ }
+
+ /**
+  * Happy-path test for destroyStore: a successful CliFunctionResult from
+  * DestroyHDFSStoreFunction yields Status.OK and a "Success" row keyed by the
+  * member id. Note the executor is stubbed with the store name as the raw
+  * function argument (no config holder).
+  */
+ @Test
+ public void testDestroyStore() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ XmlEntity xml = null;
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, xml, "Success");
+ // Stub the execution chain: withArgs(storeName) -> execute -> getResult yields the success result.
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(hdfsStoreName);
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(DestroyHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.destroyStore(hdfsStoreName, null);
+
+ assertNotNull(result);
+ assertEquals(Status.OK, result.getStatus());
+ TabularResultData resultData = (TabularResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("Member"));
+ assertNotNull(jsonObject.get("Result"));
+
+ assertEquals(memberId, (((JSONArray)jsonObject.get("Member")).get(0)));
+ assertEquals("Success", (((JSONArray)jsonObject.get("Result")).get(0)));
+ }
+
+ /**
+  * Verifies that destroying a store reports Status.ERROR and renders the
+  * throwable's class name and message when the DestroyHDFSStoreFunction
+  * result carries an exception.
+  * NOTE(review): this test invokes commands.destroyHdfstore(..) while
+  * testDestroyStore invokes commands.destroyStore(..) — confirm which entry
+  * point HDFSStoreCommands actually exposes, or whether both exist.
+  */
+ @Test
+ public void testDestroyStoreWithThrowable() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ RuntimeException exception = new RuntimeException("Test Exception");
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, exception, "Success");
+ // Stub the execution chain: withArgs(storeName) -> execute -> getResult yields the error result.
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(hdfsStoreName);
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(DestroyHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.destroyHdfstore(hdfsStoreName, null);
+
+ assertNotNull(result);
+ assertEquals(Status.ERROR, result.getStatus());
+ TabularResultData resultData = (TabularResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("Member"));
+ assertNotNull(jsonObject.get("Result"));
+
+ assertEquals(memberId, (((JSONArray)jsonObject.get("Member")).get(0)));
+ assertEquals("ERROR: " + exception.getClass().getName() + ": " + exception.getMessage(), (((JSONArray)jsonObject
+ .get("Result")).get(0)));
+ }
+
+ /**
+  * Verifies that destroyHdfstore produces the informational "No matching hdfs
+  * stores found." message when the DestroyHDFSStoreFunction result signals
+  * that no store existed on the member.
+  * NOTE(review): despite the test name, no CacheClosedException is thrown
+  * here — the fixture only returns CliFunctionResult(memberId, false, null);
+  * confirm the intended scenario against HDFSStoreCommands.
+  */
+ @Test
+ public void testDestroyStoreWithCacheClosedException() throws JSONException {
+ final String hdfsStoreName = "mockHdfsStore";
+ final String memberId = "mockMember";
+ final Cache mockCache = mockContext.mock(Cache.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final Execution mockFunctionExecutor = mockContext.mock(Execution.class, "Function Executor");
+ final ResultCollector mockResultCollector = mockContext.mock(ResultCollector.class, "ResultCollector");
+ final CliFunctionResult cliResult = new CliFunctionResult(memberId, false, null);
+ // Stub the execution chain: withArgs(storeName) -> execute -> getResult yields the failed result.
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionExecutor).withArgs(hdfsStoreName);
+ will(returnValue(mockFunctionExecutor));
+ oneOf(mockFunctionExecutor).execute(with(aNonNull(DestroyHDFSStoreFunction.class)));
+ will(returnValue(mockResultCollector));
+ oneOf(mockResultCollector).getResult();
+ will(returnValue(Arrays.asList(cliResult)));
+ }
+ });
+
+ final HDFSStoreCommands commands = new TestHDFSStoreCommands(mockCache, mockMember, mockFunctionExecutor);
+
+ final Result result = commands.destroyHdfstore(hdfsStoreName, null);
+
+ assertNotNull(result);
+ // Failure is rendered as info text, not a table.
+ InfoResultData resultData = (InfoResultData)((CommandResult)result).getResultData();
+ GfJsonObject jsonObject = resultData.getGfJsonObject().getJSONObject("content");
+ assertNotNull(jsonObject.get("message"));
+
+ assertEquals("No matching hdfs stores found.", (((JSONArray)jsonObject.get("message")).get(0)));
+ }
+
+ /**
+  * Builds a jMock HDFSStoreConfigHolder whose getters return the supplied
+  * values, for sharing across test classes. Delegates all expectation setup
+  * to createMockStore; the mock's jMock name is suffixed with the store name
+  * so multiple mocks can coexist in one Mockery.
+  */
+ public static HDFSStoreConfigHolder createMockHDFSStoreConfigHolder(Mockery mockContext, final String storeName, final String namenode,
+ final String homeDir, final int maxFileSize, final int fileRolloverInterval, final float blockCachesize,
+ final String clientConfigFile, final int batchSize, final int batchInterval, final String diskStoreName,
+ final boolean syncDiskwrite, final int dispatcherThreads, final int maxMemory, final boolean bufferPersistent,
+ final boolean minorCompact, final boolean majorCompact, final int majorCompactionInterval,
+ final int majorCompactionThreads, final int minorCompactionThreads, final int purgeInterval) {
+
+ HDFSStoreConfigHolder mockHdfsStore = mockContext.mock(HDFSStoreConfigHolder.class, "HDFSStoreConfigHolder_"
+ + storeName);
+
+ // Note the parameter order passed to createMockStore differs from this
+ // method's own signature — compaction settings come before cache settings.
+ createMockStore(mockContext, mockHdfsStore, storeName, namenode, homeDir, maxFileSize, fileRolloverInterval,
+ minorCompact, minorCompactionThreads, majorCompact, majorCompactionThreads, majorCompactionInterval,
+ purgeInterval, blockCachesize, clientConfigFile, batchSize,
+ batchInterval, diskStoreName, syncDiskwrite, dispatcherThreads, maxMemory, bufferPersistent);
+ return mockHdfsStore;
+
+ }
+
+ /**
+  * Installs "allowing" expectations on the given HDFSStore mock so every
+  * getter returns the corresponding parameter. Uses allowing(..) so tests may
+  * invoke each getter any number of times (including zero).
+  * Input-file compaction thresholds (count max 10, size max 1024, count min 2)
+  * are hard-coded rather than parameterized.
+  */
+ public static void createMockStore(Mockery mockContext, final HDFSStore mockStore, final String storeName,
+ final String namenode, final String homeDir, final int maxFileSize, final int fileRolloverInterval,
+ final boolean minorCompact, final int minorCompactionThreads, final boolean majorCompact,
+ final int majorCompactionThreads, final int majorCompactionInterval, final int purgeInterval,
+ final float blockCachesize, final String clientConfigFile, final int batchSize, final int batchInterval,
+ final String diskStoreName, final boolean syncDiskwrite, final int dispatcherThreads, final int maxMemory,
+ final boolean bufferPersistent) {
+
+ mockContext.checking(new Expectations() {
+ {
+ allowing(mockStore).getName();
+ will(returnValue(storeName));
+ allowing(mockStore).getNameNodeURL();
+ will(returnValue(namenode));
+ allowing(mockStore).getHomeDir();
+ will(returnValue(homeDir));
+ allowing(mockStore).getWriteOnlyFileRolloverSize();
+ will(returnValue(maxFileSize));
+ allowing(mockStore).getWriteOnlyFileRolloverInterval();
+ will(returnValue(fileRolloverInterval));
+ allowing(mockStore).getMinorCompaction();
+ will(returnValue(minorCompact));
+ allowing(mockStore).getMajorCompaction();
+ will(returnValue(majorCompact));
+ allowing(mockStore).getMajorCompactionInterval();
+ will(returnValue(majorCompactionInterval));
+ allowing(mockStore).getMajorCompactionThreads();
+ will(returnValue(majorCompactionThreads));
+ allowing(mockStore).getMinorCompactionThreads();
+ will(returnValue(minorCompactionThreads));
+ allowing(mockStore).getPurgeInterval();
+ will(returnValue(purgeInterval));
+ // Fixed values: not exposed as parameters of this helper.
+ allowing(mockStore).getInputFileCountMax();
+ will(returnValue(10));
+ allowing(mockStore).getInputFileSizeMax();
+ will(returnValue(1024));
+ allowing(mockStore).getInputFileCountMin();
+ will(returnValue(2));
+ allowing(mockStore).getBlockCacheSize();
+ will(returnValue(blockCachesize));
+ allowing(mockStore).getHDFSClientConfigFile();
+ will(returnValue(clientConfigFile));
+
+ allowing(mockStore).getBatchSize();
+ will(returnValue(batchSize));
+ allowing(mockStore).getBatchInterval();
+ will(returnValue(batchInterval));
+ allowing(mockStore).getDiskStoreName();
+ will(returnValue(diskStoreName));
+ allowing(mockStore).getSynchronousDiskWrite();
+ will(returnValue(syncDiskwrite));
+ allowing(mockStore).getBufferPersistent();
+ will(returnValue(bufferPersistent));
+ allowing(mockStore).getDispatcherThreads();
+ will(returnValue(dispatcherThreads));
+ allowing(mockStore).getMaxMemory();
+ will(returnValue(maxMemory));
+ }
+ });
+ }
+
+ /**
+  * Test double for HDFSStoreCommands that bypasses real cache/membership
+  * lookup: every overridden accessor hands back the fixtures supplied at
+  * construction, so commands run against the mocked Execution pipeline.
+  */
+ protected static class TestHDFSStoreCommands extends HDFSStoreCommands {
+
+ private final Cache cache;
+
+ private final DistributedMember distributedMember;
+
+ private final Execution functionExecutor;
+
+ public TestHDFSStoreCommands(final Cache cache, final DistributedMember distributedMember,
+ final Execution functionExecutor) {
+ assert cache != null: "The Cache cannot be null!";
+ this.functionExecutor = functionExecutor;
+ this.distributedMember = distributedMember;
+ this.cache = cache;
+ }
+
+ // Return the fixture cache instead of resolving it from the environment.
+ @Override
+ protected Cache getCache() {
+ return this.cache;
+ }
+
+ // The "cluster" always consists of the single fixture member.
+ @Override
+ protected Set<DistributedMember> getMembers(final Cache cache) {
+ assertSame(getCache(), cache);
+ return Collections.singleton(this.distributedMember);
+ }
+
+ // Hand back the mocked Execution so tests control the function pipeline.
+ @Override
+ protected Execution getMembersFunctionExecutor(final Set<DistributedMember> members) {
+ Assert.assertNotNull(members);
+ return this.functionExecutor;
+ }
+
+ @Override
+ protected Set<DistributedMember> getNormalMembers(final Cache cache) {
+ assertSame(getCache(), cache);
+ return Collections.singleton(this.distributedMember);
+ }
+
+ // Group resolution likewise yields only the fixture member (mutable set,
+ // matching the behavior callers may rely on).
+ @Override
+ protected Set<DistributedMember> getGroupMembers(String[] groups) {
+ final Set<DistributedMember> members = new HashSet<DistributedMember>();
+ members.add(this.distributedMember);
+ return members;
+
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/9f3f10fd/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/AlterHDFSStoreFunctionJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/AlterHDFSStoreFunctionJUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/AlterHDFSStoreFunctionJUnitTest.java
new file mode 100644
index 0000000..4a93e30
--- /dev/null
+++ b/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/AlterHDFSStoreFunctionJUnitTest.java
@@ -0,0 +1,324 @@
+/*=========================================================================
+ * Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
+ * This product is protected by U.S. and international copyright
+ * and intellectual property laws. Pivotal products are covered by
+ * one or more patents listed at http://www.pivotal.io/patents.
+ *=========================================================================
+ */
+
+package com.gemstone.gemfire.management.internal.cli.functions;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.logging.log4j.Logger;
+import org.jmock.Expectations;
+import org.jmock.Mockery;
+import org.jmock.lib.legacy.ClassImposteriser;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheClosedException;
+import com.gemstone.gemfire.cache.execute.FunctionContext;
+import com.gemstone.gemfire.cache.execute.ResultSender;
+import com.gemstone.gemfire.cache.hdfs.HDFSStore;
+import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder;
+import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl;
+import com.gemstone.gemfire.distributed.DistributedMember;
+import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
+import com.gemstone.gemfire.internal.cache.InternalCache;
+import com.gemstone.gemfire.internal.logging.LogService;
+import com.gemstone.gemfire.management.internal.cli.commands.HDFSStoreCommandsJUnitTest;
+import com.gemstone.gemfire.management.internal.cli.functions.AlterHDFSStoreFunction.AlterHDFSStoreAttributes;
+import com.gemstone.gemfire.management.internal.configuration.domain.XmlEntity;
+import com.gemstone.gemfire.test.junit.categories.HoplogTest;
+import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
+
+/**
+ * The AlterHDFSStoreFunctionJUnitTest test suite class tests the contract and
+ * functionality of the AlterHDFSStoreFunction class. </p>
+ *
+ * @author Namrata Thanvi
+ * @see com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl
+ * @see com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder
+ * @see com.gemstone.gemfire.management.internal.cli.functions.AlterHDFSStoreFunction
+ * @see org.jmock.Expectations
+ * @see org.jmock.Mockery
+ * @see org.junit.Assert
+ * @see org.junit.Test
+ */
+@SuppressWarnings( { "unused" })
+@Category({IntegrationTest.class, HoplogTest.class})
+public class AlterHDFSStoreFunctionJUnitTest {
+
+ // NOTE(review): logger appears unused within this test class.
+ private static final Logger logger = LogService.getLogger();
+
+ // Fresh jMock context per test; ClassImposteriser allows mocking classes.
+ private Mockery mockContext;
+
+ @Before
+ public void setup() {
+ mockContext = new Mockery() {
+ {
+ setImposteriser(ClassImposteriser.INSTANCE);
+ }
+ };
+ }
+
+ @After
+ public void tearDown() {
+ // Fails the test if any declared expectation was not met.
+ mockContext.assertIsSatisfied();
+ mockContext = null;
+ }
+
+ /**
+  * Happy path: the store exists on the member, so execute() reports a
+  * "Success" CliFunctionResult tagged with the member name.
+  */
+ @Test
+ public void testExecute() throws Throwable {
+
+ final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "FunctionContext");
+ final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");
+
+ final String memberId = "mockMemberId";
+ final String memberName = "mockMemberName";
+
+ final AlterHDFSStoreFunction function = createAlterHDFSStoreFunction(mockCache, mockMember, xmlEntity);
+ final TestResultSender testResultSender = new TestResultSender();
+ final HDFSStoreImpl mockHdfsStore = CreateHDFSStoreFunctionJUnitTest.createMockHDFSStoreImpl(mockContext,
+ "hdfsStoreName", "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 20, 20, null, false, 0, 1024, false,
+ false, true, 20, 20, 10, 100);
+ final AlterHDFSStoreAttributes alterHDFSStoreAttributes = new AlterHDFSStoreAttributes(
+ "mockStore", 100, 100, false, false, 100, 100, 100, 100, 100,
+ 100);
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockMember).getId();
+ will(returnValue(memberId));
+ exactly(2).of(mockMember).getName();
+ will(returnValue(memberName));
+ oneOf(mockFunctionContext).getArguments();
+ will(returnValue(alterHDFSStoreAttributes));
+ oneOf(mockCache).findHDFSStore(alterHDFSStoreAttributes.getHdfsUniqueName());
+ will(returnValue(mockHdfsStore));
+ oneOf(mockFunctionContext).getResultSender();
+ will(returnValue(testResultSender));
+ }
+ });
+
+ function.execute(mockFunctionContext);
+
+ final List<?> results = testResultSender.getResults();
+
+ assertNotNull(results);
+ assertEquals(1, results.size());
+
+ final CliFunctionResult result = (CliFunctionResult)results.get(0);
+ assertEquals(memberName, result.getMemberIdOrName());
+ assertEquals("Success", result.getMessage());
+
+ }
+
+ /**
+  * When findHDFSStore returns null, the function responds with a
+  * "Hdfs store not found on this member" message instead of failing.
+  */
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testExecuteOnMemberHavingNoHDFSStore() throws Throwable {
+
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "FunctionContext");
+ final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
+ final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");
+
+ final String memberId = "mockMemberId";
+ final String memberName = "mockMemberName";
+
+ final TestResultSender testResultSender = new TestResultSender();
+ final AlterHDFSStoreFunction function = createAlterHDFSStoreFunction(mockCache, mockMember, xmlEntity);
+ final AlterHDFSStoreAttributes alterHDFSStoreAttributes = new AlterHDFSStoreAttributes(
+ "mockStore", 100, 100, false, false, 100, 100, 100, 100, 100,
+ 100);
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockCache).findHDFSStore(alterHDFSStoreAttributes.getHdfsUniqueName());
+ will(returnValue(null));
+ oneOf(mockMember).getId();
+ will(returnValue(memberId));
+ exactly(2).of(mockMember).getName();
+ will(returnValue(memberName));
+ oneOf(mockFunctionContext).getArguments();
+ will(returnValue(alterHDFSStoreAttributes));
+ oneOf(mockFunctionContext).getResultSender();
+ will(returnValue(testResultSender));
+ }
+ });
+
+ function.execute(mockFunctionContext);
+
+ final List<?> results = testResultSender.getResults();
+
+ assertNotNull(results);
+ assertEquals(1, results.size());
+
+ final CliFunctionResult result = (CliFunctionResult)results.get(0);
+ assertEquals(memberName, result.getMemberIdOrName());
+ assertEquals("Hdfs store not found on this member", result.getMessage());
+ }
+
+ /**
+  * When getCache throws CacheClosedException, execute still sends exactly
+  * one result — with an empty member id and a null message, since the
+  * failure happens before the member can be resolved.
+  */
+ @Test
+ public void testExecuteOnMemberWithNoCache() throws Throwable {
+
+ final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "MockFunctionContext");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final InternalCache mockCache = mockContext.mock(InternalCache.class, "Cache");
+ final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");
+
+ final TestResultSender testResultSender = new TestResultSender();
+ final AlterHDFSStoreAttributes alterHDFSStoreAttributes = new AlterHDFSStoreAttributes(
+ "mockStore", 100, 100, false, false, 100, 100, 100, 100, 100,
+ 100);
+
+ // Override getCache to simulate a closed cache on this member.
+ final AlterHDFSStoreFunction function = new TestAlterHDFSStoreFunction(mockCache, mockMember, xmlEntity) {
+ @Override
+ protected Cache getCache() {
+ throw new CacheClosedException("Expected");
+ }
+ };
+
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockFunctionContext).getArguments();
+ will(returnValue(alterHDFSStoreAttributes));
+ oneOf(mockFunctionContext).getResultSender();
+ will(returnValue(testResultSender));
+ }
+ });
+
+ function.execute(mockFunctionContext);
+ final List<?> results = testResultSender.getResults();
+
+ assertNotNull(results);
+ assertEquals(1, results.size());
+
+ final CliFunctionResult result = (CliFunctionResult)results.get(0);
+ assertEquals("", result.getMemberIdOrName());
+ assertNull(result.getMessage());
+ }
+
+ /**
+  * A RuntimeException raised while locating the store is captured in the
+  * CliFunctionResult's throwable rather than propagating out of execute().
+  */
+ @Test
+ public void testExecuteHandleRuntimeException() throws Throwable {
+
+ final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "FunctionContext");
+ final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
+ final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
+ final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");
+
+ final String memberId = "mockMemberId";
+ final String memberName = "mockMemberName";
+ final TestResultSender testResultSender = new TestResultSender();
+ final AlterHDFSStoreFunction function = createAlterHDFSStoreFunction(mockCache, mockMember, xmlEntity);
+
+ final AlterHDFSStoreAttributes alterHDFSStoreAttributes = new AlterHDFSStoreAttributes(
+ "mockStore", 100, 100, false, false, 100, 100, 100, 100, 100,
+ 100);
+ mockContext.checking(new Expectations() {
+ {
+ oneOf(mockMember).getId();
+ will(returnValue(memberId));
+ exactly(2).of(mockMember).getName();
+ will(returnValue(memberName));
+ oneOf(mockFunctionContext).getArguments();
+ will(returnValue(alterHDFSStoreAttributes));
+ oneOf(mockCache).findHDFSStore(alterHDFSStoreAttributes.getHdfsUniqueName());
+ will(throwException(new RuntimeException("expected")));
+ oneOf(mockFunctionContext).getResultSender();
+ will(returnValue(testResultSender));
+ }
+ });
+
+ function.execute(mockFunctionContext);
+ final List<?> results = testResultSender.getResults();
+
+ assertNotNull(results);
+ assertEquals(1, results.size());
+
+ final CliFunctionResult result = (CliFunctionResult)results.get(0);
+ assertEquals(memberName, result.getMemberIdOrName());
+ assertEquals("expected", result.getThrowable().getMessage());
+
+ }
+
+ // Factory hook so subclasses can substitute a different test double.
+ protected TestAlterHDFSStoreFunction createAlterHDFSStoreFunction(final Cache cache, DistributedMember member,
+ XmlEntity xml) {
+ return new TestAlterHDFSStoreFunction(cache, member, xml);
+ }
+
+ /**
+  * Test double for AlterHDFSStoreFunction: returns the fixture cache,
+  * member, and XmlEntity, and makes alterHdfsStore a no-op so tests
+  * exercise only the surrounding execute() logic.
+  */
+ protected static class TestAlterHDFSStoreFunction extends AlterHDFSStoreFunction {
+ private static final long serialVersionUID = 1L;
+
+ private final Cache cache;
+
+ private final DistributedMember member;
+
+ private final XmlEntity xml;
+
+ public TestAlterHDFSStoreFunction(final Cache cache, DistributedMember member, XmlEntity xml) {
+ this.cache = cache;
+ this.member = member;
+ this.xml = xml;
+ }
+
+ @Override
+ protected Cache getCache() {
+ return this.cache;
+ }
+
+ @Override
+ protected DistributedMember getDistributedMember(Cache cache) {
+ return member;
+ }
+
+ @Override
+ protected XmlEntity getXMLEntity(String storeName) {
+ return xml;
+ }
+
+ @Override
+ protected HDFSStore alterHdfsStore(HDFSStore hdfsStore, AlterHDFSStoreAttributes alterAttributes) {
+ return hdfsStore;
+ }
+ }
+
+ /**
+  * In-memory ResultSender that records results for assertions; a sent
+  * exception is rethrown when getResults() is called.
+  */
+ protected static class TestResultSender implements ResultSender {
+
+ private final List<Object> results = new LinkedList<Object>();
+
+ private Throwable t;
+
+ protected List<Object> getResults() throws Throwable {
+ if (t != null) {
+ throw t;
+ }
+ return Collections.unmodifiableList(results);
+ }
+
+ public void lastResult(final Object lastResult) {
+ results.add(lastResult);
+ }
+
+ public void sendResult(final Object oneResult) {
+ results.add(oneResult);
+ }
+
+ public void sendException(final Throwable t) {
+ this.t = t;
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/9f3f10fd/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/CreateHDFSStoreFunctionJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/CreateHDFSStoreFunctionJUnitTest.java b/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/CreateHDFSStoreFunctionJUnitTest.java
new file mode 100644
index 0000000..8a012b4
--- /dev/null
+++ b/geode-core/src/test/java/com/gemstone/gemfire/management/internal/cli/functions/CreateHDFSStoreFunctionJUnitTest.java
@@ -0,0 +1,307 @@
+/*=========================================================================
+ * Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
+ * This product is protected by U.S. and international copyright
+ * and intellectual property laws. Pivotal products are covered by
+ * one or more patents listed at http://www.pivotal.io/patents.
+ *=========================================================================
+ */
+
+package com.gemstone.gemfire.management.internal.cli.functions;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.logging.log4j.Logger;
+import org.jmock.Expectations;
+import org.jmock.Mockery;
+import org.jmock.lib.legacy.ClassImposteriser;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheClosedException;
+import com.gemstone.gemfire.cache.execute.FunctionContext;
+import com.gemstone.gemfire.cache.execute.ResultSender;
+import com.gemstone.gemfire.cache.hdfs.HDFSStore;
+import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder;
+import com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl;
+import com.gemstone.gemfire.distributed.DistributedMember;
+import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
+import com.gemstone.gemfire.internal.logging.LogService;
+import com.gemstone.gemfire.management.internal.cli.commands.HDFSStoreCommandsJUnitTest;
+import com.gemstone.gemfire.management.internal.configuration.domain.XmlEntity;
+import com.gemstone.gemfire.test.junit.categories.HoplogTest;
+import com.gemstone.gemfire.test.junit.categories.IntegrationTest
+;
+
+/**
+ * The CreateHDFSStoreFunctionJUnitTest test suite class tests the contract and
+ * functionality of the CreateHDFSStoreFunction class.
+ *
+ * @author Namrata Thanvi
+ * @see com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl
+ * @see com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreConfigHolder
+ * @see com.gemstone.gemfire.management.internal.cli.functions.CreateHDFSStoreFunction
+ * @see org.jmock.Expectations
+ * @see org.jmock.Mockery
+ * @see org.junit.Assert
+ * @see org.junit.Test
+ */
@SuppressWarnings( { "unused" })
@Category({IntegrationTest.class, HoplogTest.class})
public class CreateHDFSStoreFunctionJUnitTest {

  private static final Logger logger = LogService.getLogger();

  // jmock context; recreated per test and verified in tearDown().
  private Mockery mockContext;

  // NOTE(review): never read or written anywhere in this class — looks like
  // dead state carried over from a sibling test; candidate for removal.
  private static Properties props = new Properties();

  @Before
  public void setup() {
    // ClassImposteriser lets jmock mock concrete classes (GemFireCacheImpl,
    // HDFSStoreImpl, XmlEntity), not only interfaces.
    mockContext = new Mockery() {
      {
        setImposteriser(ClassImposteriser.INSTANCE);
      }
    };
  }

  @After
  public void tearDown() {
    // Fails the test if any expectation declared via checking(...) was unmet
    // (e.g. a oneOf/exactly(n) call count that was not reached).
    mockContext.assertIsSatisfied();
    mockContext = null;
  }

  /**
   * Happy path: execute() runs against a healthy cache and sends exactly one
   * CliFunctionResult carrying the member name and the "Success" message.
   */
  @Test
  public void testExecute() throws Throwable {

    final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
    final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
    final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "FunctionContext");
    final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");

    final String memberId = "mockMemberId";
    final String memberName = "mockMemberName";

    final TestResultSender testResultSender = new TestResultSender();

    // Store the function "creates"; TestCreateHDFSStoreFunction returns it verbatim.
    final HDFSStoreImpl mockHdfsStore = createMockHDFSStoreImpl(mockContext, "hdfsStoreName", "hdfs://localhost:9000", "testDir",
        1024, 20, .25f, null, 20, 20, null, false, 0, 1024, false, false, true, 20, 20, 10, 100);

    // Config holder passed in as the function argument (note the second set of
    // attribute values differs from the mock store's — only plumbing is tested here).
    final HDFSStoreConfigHolder mockHdfsStoreConfigHolder = HDFSStoreCommandsJUnitTest.createMockHDFSStoreConfigHolder(
        mockContext, "hdfsStoreName", "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0,
        2048, true, true, true, 40, 40, 40, 800);

    final CreateHDFSStoreFunction function = new TestCreateHDFSStoreFunction(mockCache, mockMember, xmlEntity , mockHdfsStore);

    mockContext.checking(new Expectations() {
      {
        oneOf(mockMember).getId();
        will(returnValue(memberId));
        // getName() is consulted twice by the function under test
        // (presumably once for the result tag and once for logging —
        // TODO confirm against CreateHDFSStoreFunction.execute()).
        exactly(2).of(mockMember).getName();
        will(returnValue(memberName));
        oneOf(mockFunctionContext).getArguments();
        will(returnValue(mockHdfsStoreConfigHolder));
        oneOf(mockFunctionContext).getResultSender();
        will(returnValue(testResultSender));
      }
    });

    function.execute(mockFunctionContext);

    final List<?> results = testResultSender.getResults();

    assertNotNull(results);
    assertEquals(1, results.size());

    final CliFunctionResult result = (CliFunctionResult)results.get(0);
    assertEquals(memberName, result.getMemberIdOrName());
    assertEquals("Success", result.getMessage());

  }



  /**
   * When getCache() throws CacheClosedException, execute() must still send a
   * single result — with an empty member id/name and no message — rather than
   * propagate the exception.
   */
  @Test
  public void testExecuteOnMemberWithNoCache() throws Throwable {

    final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "MockFunctionContext");
    final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
    final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
    final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");

    // NOTE(review): memberId/memberName are unused in this test — getCache()
    // throws before the member is ever consulted.
    final String memberId = "mockMemberId";
    final String memberName = "mockMemberName";

    final TestResultSender testResultSender = new TestResultSender();
    final HDFSStoreImpl mockHdfsStore = createMockHDFSStoreImpl(mockContext, "hdfsStoreName", "hdfs://localhost:9000", "testDir",
        1024, 20, .25f, null, 20, 20, null, false, 0, 1024, false, false, true, 20, 20, 10, 100);

    final HDFSStoreConfigHolder mockHdfsStoreConfigHolder = HDFSStoreCommandsJUnitTest.createMockHDFSStoreConfigHolder(mockContext, "hdfsStoreName",
        "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0, 2048, true, true, true, 40,
        40, 40, 800);

    // Simulate a member whose cache is already closed.
    final CreateHDFSStoreFunction function = new TestCreateHDFSStoreFunction(mockCache, mockMember, xmlEntity , mockHdfsStore) {
      @Override
      protected Cache getCache() {
        throw new CacheClosedException("Expected");
      }
    };

    mockContext.checking(new Expectations() {
      {
        // Only the result sender is touched; getArguments()/member lookups
        // never happen because getCache() throws first.
        oneOf(mockFunctionContext).getResultSender();
        will(returnValue(testResultSender));
      }
    });

    function.execute(mockFunctionContext);
    final List<?> results = testResultSender.getResults();

    assertNotNull(results);
    assertEquals(1, results.size());

    final CliFunctionResult result = (CliFunctionResult)results.get(0);
    assertEquals("", result.getMemberIdOrName());
    assertNull(result.getMessage());
  }


  /**
   * A RuntimeException thrown inside execute() must be captured into the
   * CliFunctionResult's throwable (empty member tag) instead of escaping.
   */
  @Test
  public void testExecuteHandleRuntimeException() throws Throwable {

    final FunctionContext mockFunctionContext = mockContext.mock(FunctionContext.class, "MockFunctionContext");
    final DistributedMember mockMember = mockContext.mock(DistributedMember.class, "DistributedMember");
    final GemFireCacheImpl mockCache = mockContext.mock(GemFireCacheImpl.class, "Cache");
    final XmlEntity xmlEntity = mockContext.mock(XmlEntity.class, "XmlEntity");

    // NOTE(review): memberId/memberName are unused here as well.
    final String memberId = "mockMemberId";
    final String memberName = "mockMemberName";

    final TestResultSender testResultSender = new TestResultSender();
    final HDFSStoreImpl mockHdfsStore = createMockHDFSStoreImpl(mockContext, "hdfsStoreName", "hdfs://localhost:9000", "testDir",
        1024, 20, .25f, null, 20, 20, null, false, 0, 1024, false, false, true, 20, 20, 10, 100);

    final HDFSStoreConfigHolder mockHdfsStoreConfigHolder = HDFSStoreCommandsJUnitTest.createMockHDFSStoreConfigHolder(
        mockContext, "hdfsStoreName", "hdfs://localhost:9000", "testDir", 1024, 20, .25f, null, 40, 40, null, false, 0,
        2048, true, true, true, 40, 40, 40, 800);

    // Simulate an unexpected failure during cache lookup.
    final CreateHDFSStoreFunction function = new TestCreateHDFSStoreFunction(mockCache, mockMember, xmlEntity , mockHdfsStore) {
      @Override
      protected Cache getCache() {
        throw new RuntimeException("expected");
      }
    };

    mockContext.checking(new Expectations() {
      {
        oneOf(mockFunctionContext).getResultSender();
        will(returnValue(testResultSender));
      }
    });


    function.execute(mockFunctionContext);
    final List<?> results = testResultSender.getResults();

    assertNotNull(results);
    assertEquals(1, results.size());

    final CliFunctionResult result = (CliFunctionResult)results.get(0);
    assertEquals("", result.getMemberIdOrName());
    assertEquals("expected", result.getThrowable().getMessage());

  }

  /**
   * Builds a jmock HDFSStoreImpl whose getters answer with the supplied
   * attribute values; delegates expectation wiring to
   * HDFSStoreCommandsJUnitTest.createMockStore (note its parameter order
   * differs from this method's — the call below reorders accordingly).
   */
  public static HDFSStoreImpl createMockHDFSStoreImpl(Mockery mockContext, final String storeName, final String namenode, final String homeDir,
      final int maxFileSize, final int fileRolloverInterval, final float blockCachesize, final String clientConfigFile,
      final int batchSize, final int batchInterval, final String diskStoreName, final boolean syncDiskwrite,
      final int dispatcherThreads, final int maxMemory, final boolean bufferPersistent, final boolean minorCompact,
      final boolean majorCompact, final int majorCompactionInterval, final int majorCompactionThreads,
      final int minorCompactionThreads, final int purgeInterval) {

    HDFSStoreImpl mockHdfsStore = mockContext.mock(HDFSStoreImpl.class, "HDFSStoreImpl");

    HDFSStoreCommandsJUnitTest.createMockStore(mockContext, mockHdfsStore, storeName, namenode, homeDir, maxFileSize,
        fileRolloverInterval, minorCompact, minorCompactionThreads, majorCompact, majorCompactionThreads,
        majorCompactionInterval, purgeInterval, blockCachesize, clientConfigFile, batchSize, batchInterval,
        diskStoreName, syncDiskwrite, dispatcherThreads, maxMemory, bufferPersistent);

    return mockHdfsStore;
  }

  /**
   * Test double for CreateHDFSStoreFunction: collaborator lookups return the
   * injected cache/member/XML entity, and store creation is stubbed to return
   * the injected store so no real HDFS store is ever built.
   */
  protected static class TestCreateHDFSStoreFunction extends CreateHDFSStoreFunction {
    private static final long serialVersionUID = 1L;

    private final Cache cache;

    private final DistributedMember member;

    private final XmlEntity xml;

    private final HDFSStoreImpl hdfsStore;

    public TestCreateHDFSStoreFunction(Cache cache, DistributedMember member, XmlEntity xml , HDFSStoreImpl hdfsStore) {
      this.cache = cache;
      this.member = member;
      this.xml = xml;
      this.hdfsStore = hdfsStore;
    }

    @Override
    protected Cache getCache() {
      return this.cache;
    }

    @Override
    protected DistributedMember getDistributedMember(Cache cache) {
      return member;
    }

    @Override
    protected XmlEntity getXMLEntity(String storeName) {
      return xml;
    }

    // Stubbed creation: ignores the config holder and echoes the canned store.
    @Override
    protected HDFSStoreImpl createHdfsStore(Cache cache, HDFSStoreConfigHolder configHolder){
      return hdfsStore;
    }
  }

  /**
   * In-memory ResultSender recording everything the function sends; a sent
   * exception is latched and re-thrown from getResults().
   */
  protected static class TestResultSender implements ResultSender {

    private final List<Object> results = new LinkedList<Object>();

    private Throwable t;

    // Returns an unmodifiable view of the collected results, or re-throws
    // the exception the function reported via sendException.
    protected List<Object> getResults() throws Throwable {
      if (t != null) {
        throw t;
      }
      return Collections.unmodifiableList(results);
    }

    public void lastResult(final Object lastResult) {
      results.add(lastResult);
    }

    public void sendResult(final Object oneResult) {
      results.add(oneResult);
    }

    public void sendException(final Throwable t) {
      this.t = t;
    }
  }

}