Posted to commits@geode.apache.org by ji...@apache.org on 2018/08/21 20:57:14 UTC

[geode] branch develop updated: GEODE-5204: add xml-file support to import/export cluster-config command (#1986)

This is an automated email from the ASF dual-hosted git repository.

jinmeiliao pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/geode.git


The following commit(s) were added to refs/heads/develop by this push:
     new 1883e81  GEODE-5204: add xml-file support to import/export cluster-config command  (#1986)
1883e81 is described below

commit 1883e81bbe07eeb4ca79bfdbfc0db279c7a7f213
Author: jinmeiliao <ji...@pivotal.io>
AuthorDate: Tue Aug 21 13:57:07 2018 -0700

    GEODE-5204: add xml-file support to import/export cluster-config command  (#1986)
    
    * GEODE-5204: have Import/ExportClusterConfigCommand import/export an individual xml file and stage the xml as well.
---
 .../cli/commands/ImportClusterConfigTest.java      |   4 +-
 ...ExportClusterConfigurationCommandDUnitTest.java |  89 +++++++
 ...ImportClusterConfigurationCommandDUnitTest.java | 152 +++++++++++
 .../ClusterConfigImportDUnitTest.java              |  18 +-
 .../ClusterConfigStartMemberDUnitTest.java         |   1 -
 .../ClusterConfigWithCallbacksDUnitTest.java       |   3 +-
 .../internal/security/MultiGfshDUnitTest.java      |   3 +
 .../InternalConfigurationPersistenceService.java   |  92 +++++--
 .../apache/geode/management/cli/GfshCommand.java   |   8 +-
 .../ExportClusterConfigurationCommand.java         | 208 +++++++++++++++
 .../internal/cli/commands/ExportConfigCommand.java |   7 +-
 .../ExportImportClusterConfigurationCommands.java  | 288 ---------------------
 .../ImportClusterConfigurationCommand.java         | 266 +++++++++++++++++++
 .../internal/cli/commands/InternalGfshCommand.java |   7 +
 .../internal/cli/result/ModelCommandResult.java    |  13 +
 .../internal/cli/result/model/DataResultModel.java |   5 +
 .../internal/cli/result/model/FileResultModel.java |  25 +-
 .../internal/cli/result/model/InfoResultModel.java |   4 +
 .../internal/cli/result/model/ResultModel.java     |  33 +--
 .../sanctioned-geode-core-serializables.txt        |   1 +
 .../ExportClusterConfigurationCommandTest.java     | 117 +++++++++
 .../ImportClusterConfigurationCommandTest.java     | 192 ++++++++++++++
 .../commands/QueryCommandIntegrationTestBase.java  |  10 +-
 .../internal/configuration/ClusterConfig.java      |   2 +-
 .../security/GfshCommandsSecurityTestBase.java     |   5 +
 .../internal/configuration/ConfigGroup.java        |   8 +-
 26 files changed, 1195 insertions(+), 366 deletions(-)
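
For reference, here is a minimal gfsh sketch of the new --xml-file workflow, pieced together from the commands exercised by the DUnit tests in this patch; the locator address, file paths, and echoed output lines are illustrative rather than copied from a real session:

    gfsh> connect --locator=localhost[10334]

    (export the xml content of the 'cluster' group to a local file)
    gfsh> export cluster-configuration --xml-file=/tmp/cluster.xml
    xml content exported to /tmp/cluster.xml

    (import an xml file as the configuration of a specific group; only
    vanilla, not-yet-configured servers in that group are reconfigured)
    gfsh> import cluster-configuration --xml-file=/tmp/groupA.xml --group=groupA

    (stage the xml without touching running servers; servers started
    afterwards will pick it up)
    gfsh> import cluster-configuration --xml-file=/tmp/cluster.xml --action=STAGE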

diff --git a/geode-assembly/src/acceptanceTest/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigTest.java b/geode-assembly/src/acceptanceTest/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigTest.java
index 2da651b..b4e55bc 100644
--- a/geode-assembly/src/acceptanceTest/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigTest.java
+++ b/geode-assembly/src/acceptanceTest/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigTest.java
@@ -46,8 +46,8 @@ public class ImportClusterConfigTest extends ClusterConfigTestBase {
         .of("connect", "import cluster-configuration --zip-file-name=" + clusterConfigZipPath)
         .withName("importConfiguration").execute(gfsh);
     assertThat(importConfiguration.getOutputText())
-        .contains("Cluster configuration successfully imported").contains(
-            "Successfully applied the imported cluster configuration on " + serverNotShutDownName);
+        .contains("Cluster configuration successfully imported")
+        .contains("Configure the servers in 'cluster' group: ");
 
     GfshExecution listMembers =
         GfshScript.of("connect", "list members").withName("listMembers").execute(gfsh);
diff --git a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommandDUnitTest.java b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommandDUnitTest.java
new file mode 100644
index 0000000..9c14c43
--- /dev/null
+++ b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommandDUnitTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.cli.commands;
+
+import static org.apache.geode.distributed.internal.DistributionConfig.GROUPS_NAME;
+import static org.apache.geode.management.internal.cli.i18n.CliStrings.EXPORT_SHARED_CONFIG;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Properties;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import org.apache.geode.test.dunit.rules.ClusterStartupRule;
+import org.apache.geode.test.dunit.rules.MemberVM;
+import org.apache.geode.test.junit.rules.GfshCommandRule;
+
+public class ExportClusterConfigurationCommandDUnitTest {
+  @ClassRule
+  public static ClusterStartupRule cluster = new ClusterStartupRule();
+
+  @ClassRule
+  public static GfshCommandRule gfsh = new GfshCommandRule();
+
+  @ClassRule
+  public static TemporaryFolder tempFolder = new TemporaryFolder();
+
+
+  private static File xmlFile;
+  private static MemberVM locator;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    xmlFile = tempFolder.newFile("my.xml");
+    locator = cluster.startLocatorVM(0);
+    Properties properties = new Properties();
+    properties.setProperty(GROUPS_NAME, "groupB");
+    cluster.startServerVM(1, properties, locator.getPort());
+    gfsh.connectAndVerify(locator);
+    gfsh.executeAndAssertThat("create region --name=regionA --type=REPLICATE").statusIsSuccess();
+    gfsh.executeAndAssertThat("create region --name=regionB --type=REPLICATE --group=groupB")
+        .statusIsSuccess();
+  }
+
+  @Test
+  public void getClusterConfig() {
+    gfsh.executeAndAssertThat(EXPORT_SHARED_CONFIG).statusIsSuccess()
+        .containsOutput("<region name=\"regionA\">").containsOutput("cluster.xml")
+        .doesNotContainOutput("<region name=\"regionB\">");
+  }
+
+
+  @Test
+  public void getClusterConfigInGroup() {
+    gfsh.executeAndAssertThat(EXPORT_SHARED_CONFIG + " --group=groupB")
+        .containsOutput("<region name=\"regionB\">")
+        .doesNotContainOutput("<region name=\"regionA\">");
+  }
+
+  @Test
+  public void getClusterConfigWithFile() throws IOException {
+    gfsh.executeAndAssertThat(EXPORT_SHARED_CONFIG + " --xml-file=" + xmlFile.getAbsolutePath())
+        .statusIsSuccess().containsOutput("cluster.xml")
+        .containsOutput("xml content exported to " + xmlFile.getAbsolutePath());
+
+    assertThat(xmlFile).exists();
+    String content = FileUtils.readFileToString(xmlFile, Charset.defaultCharset());
+    assertThat(content).startsWith("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>")
+        .contains("<region name=\"regionA\">");
+  }
+}
diff --git a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommandDUnitTest.java b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommandDUnitTest.java
new file mode 100644
index 0000000..80dde7b
--- /dev/null
+++ b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommandDUnitTest.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.cli.commands;
+
+import static org.apache.geode.distributed.internal.DistributionConfig.GROUPS_NAME;
+import static org.apache.geode.management.internal.cli.i18n.CliStrings.IMPORT_SHARED_CONFIG;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Properties;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import org.apache.geode.cache.DataPolicy;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.RegionShortcut;
+import org.apache.geode.test.dunit.rules.ClusterStartupRule;
+import org.apache.geode.test.dunit.rules.MemberVM;
+import org.apache.geode.test.junit.rules.GfshCommandRule;
+
+public class ImportClusterConfigurationCommandDUnitTest {
+
+  @Rule
+  public ClusterStartupRule cluster = new ClusterStartupRule();
+
+  @Rule
+  public GfshCommandRule gfsh = new GfshCommandRule();
+
+  @Rule
+  public TemporaryFolder tempFolder = new TemporaryFolder();
+
+  private File xmlFile;
+  private MemberVM locator;
+  private String commandWithFile;
+
+  private static String CLUSTER_XML =
+      "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n"
+          + "<cache xmlns=\"http://geode.apache.org/schema/cache\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" copy-on-read=\"false\" is-server=\"false\" lock-lease=\"120\" lock-timeout=\"60\" search-timeout=\"300\" version=\"1.0\" xsi:schemaLocation=\"http://geode.apache.org/schema/cache http://geode.apache.org/schema/cache/cache-1.0.xsd\">\n"
+          + "<region name=\"regionForCluster\">\n"
+          + "    <region-attributes data-policy=\"replicate\" scope=\"distributed-ack\"/>\n"
+          + "  </region>\n" + "</cache>\n";
+
+  private static String GROUP_XML = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n"
+      + "<cache xmlns=\"http://geode.apache.org/schema/cache\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" copy-on-read=\"false\" is-server=\"false\" lock-lease=\"120\" lock-timeout=\"60\" search-timeout=\"300\" version=\"1.0\" xsi:schemaLocation=\"http://geode.apache.org/schema/cache http://geode.apache.org/schema/cache/cache-1.0.xsd\">\n"
+      + "<region name=\"regionForGroupA\">\n"
+      + "    <region-attributes data-policy=\"replicate\" scope=\"distributed-ack\"/>\n"
+      + "  </region>\n" + "</cache>\n";
+
+  @Before
+  public void setUp() throws Exception {
+    xmlFile = tempFolder.newFile("my.xml");
+    FileUtils.write(xmlFile, CLUSTER_XML, Charset.defaultCharset());
+    commandWithFile = IMPORT_SHARED_CONFIG + " --xml-file=" + xmlFile.getAbsolutePath() + " ";
+    locator = cluster.startLocatorVM(0);
+    gfsh.connectAndVerify(locator);
+  }
+
+  @Test
+  public void noServerToBeginWith() throws IOException {
+    gfsh.executeAndAssertThat(commandWithFile).statusIsSuccess()
+        .containsOutput("Successfully set the 'cluster' configuration to the content of");
+
+    MemberVM server1 = cluster.startServerVM(1, locator.getPort());
+    server1.invoke(() -> {
+      Region region = ClusterStartupRule.getCache().getRegion("regionForCluster");
+      assertThat(region).isNotNull();
+      assertThat(region.getAttributes().getDataPolicy()).isEqualTo(DataPolicy.REPLICATE);
+    });
+
+    // you cannot configure the servers because they are not vanilla servers
+    gfsh.executeAndAssertThat(commandWithFile).statusIsError()
+        .containsOutput("Can not configure servers that are already configured");
+
+    // you can only stage with existing servers
+    gfsh.executeAndAssertThat(commandWithFile + " --action=STAGE").statusIsSuccess()
+        .containsOutput("Existing servers are not affected with this configuration change");
+
+    FileUtils.write(xmlFile, GROUP_XML, Charset.defaultCharset());
+    // you can set cluster configuration for another group
+    gfsh.executeAndAssertThat(commandWithFile + " --group=groupA").statusIsSuccess()
+        .containsOutput("Successfully set the 'groupA' configuration to the content of");
+
+    // when another server starts up in groupA, it should get both regions
+    Properties properties = new Properties();
+    properties.setProperty(GROUPS_NAME, "groupA");
+    MemberVM server2 = cluster.startServerVM(2, properties, locator.getPort());
+    server2.invoke(() -> {
+      Region region1 = ClusterStartupRule.getCache().getRegion("regionForCluster");
+      Region region2 = ClusterStartupRule.getCache().getRegion("regionForGroupA");
+      assertThat(region1).isNotNull();
+      assertThat(region2).isNotNull();
+    });
+
+    // server1 is not affected
+    server1.invoke(() -> {
+      Region region1 = ClusterStartupRule.getCache().getRegion("regionForCluster");
+      Region region2 = ClusterStartupRule.getCache().getRegion("regionForGroupA");
+      assertThat(region1).isNotNull();
+      assertThat(region2).isNull();
+    });
+  }
+
+  @Test
+  public void canNotConfigureIfServersAreNotEmpty() throws IOException {
+    // start a server, and create a standalone region on that server
+    MemberVM server = cluster.startServerVM(1, locator.getPort());
+    server.invoke(() -> {
+      ClusterStartupRule.getCache().createRegionFactory(RegionShortcut.REPLICATE).create("regionA");
+    });
+
+    gfsh.executeAndAssertThat(commandWithFile).statusIsError()
+        .containsOutput("Can not configure servers with existing regions: regionA");
+  }
+
+  @Test
+  public void configureVanillaServers() throws IOException {
+    Properties properties = new Properties();
+    properties.setProperty(GROUPS_NAME, "groupA");
+    MemberVM serverA = cluster.startServerVM(1, properties, locator.getPort());
+    gfsh.executeAndAssertThat(commandWithFile + " --group=groupA").statusIsSuccess()
+        .containsOutput("Successfully set the 'groupA' configuration to the content of")
+        .containsOutput("Configure the servers in 'groupA' group").containsOutput("server-1")
+        .containsOutput("Cache successfully reloaded.");
+
+    // start another server that belongs to both groupA and groupB
+    properties.setProperty(GROUPS_NAME, "groupA,groupB");
+    MemberVM serverB = cluster.startServerVM(2, properties, locator.getPort());
+
+    // try to set the cluster configuration of groupB; in this case, we can't bounce serverB because
+    // it's already configured by groupA
+    gfsh.executeAndAssertThat(commandWithFile + " --group=groupB").statusIsError()
+        .containsOutput("Can not configure servers that are already configured.");
+  }
+}
diff --git a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigImportDUnitTest.java b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigImportDUnitTest.java
index 2bb5916..180d0d4 100644
--- a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigImportDUnitTest.java
+++ b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigImportDUnitTest.java
@@ -88,12 +88,12 @@ public class ClusterConfigImportDUnitTest extends ClusterConfigTestBase {
     serverProps.setProperty("groups", "group2");
     MemberVM server2 = lsRule.startServerVM(2, serverProps, locatorVM.getPort());
 
-    CommandResult result = gfshConnector
-        .executeCommand("import cluster-configuration --zip-file-name=" + clusterConfigZipPath);
-    assertThat(result.getMessageFromContent())
-        .contains("Successfully applied the imported cluster configuration on server-1");
-    assertThat(result.getMessageFromContent())
-        .contains("Successfully applied the imported cluster configuration on server-2");
+    gfshConnector
+        .executeAndAssertThat(
+            "import cluster-configuration --zip-file-name=" + clusterConfigZipPath)
+        .statusIsSuccess().containsOutput("Cluster configuration successfully imported.")
+        .containsOutput("Configure the servers in 'cluster' group:").containsOutput("server-1")
+        .containsOutput("server-2");
     new ClusterConfig(CLUSTER).verify(server1);
     new ClusterConfig(CLUSTER, GROUP2).verify(server2);
 
@@ -110,8 +110,7 @@ public class ClusterConfigImportDUnitTest extends ClusterConfigTestBase {
     gfshConnector
         .executeAndAssertThat(
             "import cluster-configuration --zip-file-name=" + clusterConfigZipPath)
-        .statusIsError()
-        .containsOutput("Running servers have existing cluster configuration applied already.");
+        .statusIsError().containsOutput("Can not configure servers that are already configured.");
   }
 
   @Test
@@ -123,8 +122,7 @@ public class ClusterConfigImportDUnitTest extends ClusterConfigTestBase {
     gfshConnector
         .executeAndAssertThat(
             "import cluster-configuration --zip-file-name=" + clusterConfigZipPath)
-        .statusIsError()
-        .containsOutput("Running servers have existing cluster configuration applied already.");
+        .statusIsError().containsOutput("Can not configure servers that are already configured.");
   }
 
   @Test
diff --git a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigStartMemberDUnitTest.java b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigStartMemberDUnitTest.java
index aeb7c4a..e3b8279 100644
--- a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigStartMemberDUnitTest.java
+++ b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigStartMemberDUnitTest.java
@@ -96,7 +96,6 @@ public class ClusterConfigStartMemberDUnitTest extends ClusterConfigTestBase {
     properties.setProperty(CLUSTER_CONFIGURATION_DIR, locatorDir.getCanonicalPath());
 
     MemberVM locator = lsRule.startLocatorVM(0, properties);
-    CONFIG_FROM_ZIP.verify(locator);
 
     return locator;
   }
diff --git a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigWithCallbacksDUnitTest.java b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigWithCallbacksDUnitTest.java
index a984e9f..c0a8b17 100644
--- a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigWithCallbacksDUnitTest.java
+++ b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/configuration/ClusterConfigWithCallbacksDUnitTest.java
@@ -80,8 +80,7 @@ public class ClusterConfigWithCallbacksDUnitTest {
     // import cluster configuration with a running server
     gfsh.executeAndAssertThat(
         "import cluster-configuration --zip-file-name=" + clusterConfigZip.getAbsolutePath())
-        .statusIsSuccess()
-        .containsOutput("Successfully applied the imported cluster configuration on server-1");
+        .statusIsSuccess().containsOutput("Configure the servers in 'cluster' group");
 
     // assert that the callbacks are properly hooked up with the region
     server.invoke(() -> {
diff --git a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/security/MultiGfshDUnitTest.java b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/security/MultiGfshDUnitTest.java
index 765c5e5..b4d1b98 100644
--- a/geode-core/src/distributedTest/java/org/apache/geode/management/internal/security/MultiGfshDUnitTest.java
+++ b/geode-core/src/distributedTest/java/org/apache/geode/management/internal/security/MultiGfshDUnitTest.java
@@ -89,6 +89,9 @@ public class MultiGfshDUnitTest {
 
         CommandResult result = gfsh.executeCommand(command.getCommand());
 
+        if (!(result.getResultData() instanceof ErrorResultData)) {
+          break;
+        }
         int errorCode = ((ErrorResultData) result.getResultData()).getErrorCode();
 
         // for some commands there are pre execution checks to check for user input error, will skip
diff --git a/geode-core/src/main/java/org/apache/geode/distributed/internal/InternalConfigurationPersistenceService.java b/geode-core/src/main/java/org/apache/geode/distributed/internal/InternalConfigurationPersistenceService.java
index 14a8f83..cbc0ee5 100644
--- a/geode-core/src/main/java/org/apache/geode/distributed/internal/InternalConfigurationPersistenceService.java
+++ b/geode-core/src/main/java/org/apache/geode/distributed/internal/InternalConfigurationPersistenceService.java
@@ -31,11 +31,9 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -53,6 +51,7 @@ import javax.xml.transform.TransformerFactoryConfigurationError;
 
 import com.healthmarketscience.rmiio.RemoteInputStream;
 import com.healthmarketscience.rmiio.RemoteInputStreamClient;
+import joptsimple.internal.Strings;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.io.IOUtils;
@@ -165,6 +164,7 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
       }
       clusterConfigRootDir = diskDir.getCanonicalPath();
     }
+    clusterConfigRootDir = new File(clusterConfigRootDir).getAbsolutePath();
 
     // resolve the file paths
     String configDiskDirName =
@@ -525,7 +525,7 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
       if (loadSharedConfigFromDir) {
         logger.info("Reading cluster configuration from '{}' directory",
             InternalConfigurationPersistenceService.CLUSTER_CONFIG_ARTIFACTS_DIR_NAME);
-        loadSharedConfigurationFromDisk();
+        loadSharedConfigurationFromDir(new File(this.configDirPath));
       } else {
         persistSecuritySettings(configRegion);
         // for those groups that have jar files, need to download the jars from other locators
@@ -702,6 +702,20 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
     return getConfigurationRegion().get(groupName);
   }
 
+  public void setConfiguration(String groupName, Configuration configuration) {
+    getConfigurationRegion().put(groupName, configuration);
+  }
+
+  public boolean hasXmlConfiguration() {
+    Region<String, Configuration> configRegion = getConfigurationRegion();
+    return configRegion.values().stream().anyMatch(c -> c.getCacheXmlContent() != null);
+  }
+
+  public Map<String, Configuration> getEntireConfiguration() {
+    Set<String> keys = getConfigurationRegion().keySet();
+    return getConfigurationRegion().getAll(keys);
+  }
+
   /**
    * Returns the path of Shared configuration directory
    *
@@ -711,6 +725,10 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
     return configDirPath;
   }
 
+  public Path getClusterConfigDirPath() {
+    return Paths.get(configDirPath);
+  }
+
   /**
    * Gets the current status of the ConfigurationPersistenceService If the status is started , it
    * determines if the shared configuration is waiting for new configuration on other locators
@@ -735,20 +753,34 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
     return this.status.get();
   }
 
-  /**
-   * Loads the internal region with the configuration in the configDirPath
-   */
-  public void loadSharedConfigurationFromDisk()
+  // configDir is the directory that has all the group subdirectories underneath it.
+  public void loadSharedConfigurationFromDir(File configDir)
       throws SAXException, ParserConfigurationException, TransformerException, IOException {
     lockSharedConfiguration();
-    File[] groupNames =
-        new File(this.configDirPath).listFiles((FileFilter) DirectoryFileFilter.INSTANCE);
-
     try {
+      File[] groupNames = configDir.listFiles((FileFilter) DirectoryFileFilter.INSTANCE);
+      boolean needToCopyJars = true;
+      if (configDir.getAbsolutePath().equals(getSharedConfigurationDirPath())) {
+        needToCopyJars = false;
+      }
+
+      logger.info("loading the cluster configuration: ");
       Map<String, Configuration> sharedConfiguration = new HashMap<>();
       for (File groupName : groupNames) {
         Configuration configuration = readConfiguration(groupName);
+        logger.info(configuration.getConfigName() + " xml content: \n"
+            + configuration.getCacheXmlContent());
+        logger.info(configuration.getConfigName() + " properties: "
+            + configuration.getGemfireProperties().size());
+        logger.info(configuration.getConfigName() + " jars: "
+            + Strings.join(configuration.getJarNames(), ", "));
         sharedConfiguration.put(groupName.getName(), configuration);
+        if (needToCopyJars && configuration.getJarNames().size() > 0) {
+          Path groupDirPath = createConfigDirIfNecessary(configuration.getConfigName()).toPath();
+          for (String jarName : configuration.getJarNames()) {
+            Files.copy(groupName.toPath().resolve(jarName), groupDirPath.resolve(jarName));
+          }
+        }
       }
       Region<String, Configuration> clusterRegion = getConfigurationRegion();
       clusterRegion.clear();
@@ -765,24 +797,10 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
     }
   }
 
-  public void renameExistingSharedConfigDirectory() {
-    File configDirFile = new File(this.configDirPath);
-    if (configDirFile.exists()) {
-      String configDirFileName2 = CLUSTER_CONFIG_ARTIFACTS_DIR_NAME
-          + new SimpleDateFormat("yyyyMMddhhmm").format(new Date()) + '.' + System.nanoTime();
-      try {
-        File configDirFile2 = new File(configDirFile.getParent(), configDirFileName2);
-        FileUtils.moveDirectory(configDirFile, configDirFile2);
-      } catch (IOException e) {
-        logger.info(e);
-      }
-    }
-  }
-
-
   // Write the content of xml and properties into the file system for exporting purpose
-  public void writeConfigToFile(final Configuration configuration) throws IOException {
-    File configDir = createConfigDirIfNecessary(configuration.getConfigName());
+  public void writeConfigToFile(final Configuration configuration, File rootDir)
+      throws IOException {
+    File configDir = createConfigDirIfNecessary(rootDir, configuration.getConfigName());
 
     File propsFile = new File(configDir, configuration.getPropertiesFileName());
     BufferedWriter bw = new BufferedWriter(new FileWriter(propsFile));
@@ -791,6 +809,20 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
 
     File xmlFile = new File(configDir, configuration.getCacheXmlFileName());
     FileUtils.writeStringToFile(xmlFile, configuration.getCacheXmlContent(), "UTF-8");
+
+    // copy the jars if the rootDir is different than the configDirPath
+    if (rootDir.getAbsolutePath().equals(getSharedConfigurationDirPath())) {
+      return;
+    }
+
+    File locatorConfigDir =
+        new File(getSharedConfigurationDirPath(), configuration.getConfigName());
+    if (locatorConfigDir.exists()) {
+      File[] jarFiles = locatorConfigDir.listFiles(x -> x.getName().endsWith(".jar"));
+      for (File file : jarFiles) {
+        Files.copy(file.toPath(), configDir.toPath().resolve(file.getName()));
+      }
+    }
   }
 
   public boolean lockSharedConfiguration() {
@@ -882,7 +914,11 @@ public class InternalConfigurationPersistenceService implements ConfigurationPer
    * Creates a directory for this configuration if it doesn't already exist.
    */
   private File createConfigDirIfNecessary(final String configName) throws IOException {
-    File clusterConfigDir = new File(getSharedConfigurationDirPath());
+    return createConfigDirIfNecessary(new File(getSharedConfigurationDirPath()), configName);
+  }
+
+  private File createConfigDirIfNecessary(File clusterConfigDir, final String configName)
+      throws IOException {
     if (!clusterConfigDir.exists()) {
       if (!clusterConfigDir.mkdirs()) {
         throw new IOException("Cannot create directory : " + getSharedConfigurationDirPath());
diff --git a/geode-core/src/main/java/org/apache/geode/management/cli/GfshCommand.java b/geode-core/src/main/java/org/apache/geode/management/cli/GfshCommand.java
index 1841eac..71565a2 100644
--- a/geode-core/src/main/java/org/apache/geode/management/cli/GfshCommand.java
+++ b/geode-core/src/main/java/org/apache/geode/management/cli/GfshCommand.java
@@ -83,10 +83,16 @@ public abstract class GfshCommand implements CommandMarker {
     return locator == null ? null : locator.getConfigurationPersistenceService();
   }
 
+
   public void setCache(Cache cache) {
     this.cache = (InternalCache) cache;
   }
 
+  public boolean isSharedConfigurationRunning() {
+    InternalLocator locator = InternalLocator.getLocator();
+    return locator != null && locator.isSharedConfigurationRunning();
+  }
+
   public Subject getSubject() {
     return cache.getSecurityService().getSubject();
   }
@@ -140,7 +146,7 @@ public abstract class GfshCommand implements CommandMarker {
    * if no members matches these names, an empty set would return, this does not include locators
    */
   public Set<DistributedMember> findMembers(String[] groups, String[] members) {
-    return CliUtil.findMembers(groups, members, (InternalCache) getCache());
+    return CliUtil.findMembers(groups, members, cache);
   }
 
   /**
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommand.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommand.java
new file mode 100644
index 0000000..aa55092
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommand.java
@@ -0,0 +1,208 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.cli.commands;
+
+import static org.apache.geode.management.internal.cli.i18n.CliStrings.GROUP;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Properties;
+import java.util.Set;
+
+import joptsimple.internal.Strings;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.logging.log4j.Logger;
+import org.springframework.shell.core.annotation.CliCommand;
+import org.springframework.shell.core.annotation.CliOption;
+
+import org.apache.geode.distributed.ConfigurationPersistenceService;
+import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.management.cli.CliMetaData;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.result.model.DataResultModel;
+import org.apache.geode.management.internal.cli.result.model.FileResultModel;
+import org.apache.geode.management.internal.cli.result.model.InfoResultModel;
+import org.apache.geode.management.internal.cli.result.model.ResultModel;
+import org.apache.geode.management.internal.configuration.domain.Configuration;
+import org.apache.geode.management.internal.configuration.utils.ZipUtils;
+import org.apache.geode.management.internal.security.ResourceOperation;
+import org.apache.geode.security.ResourcePermission.Operation;
+import org.apache.geode.security.ResourcePermission.Resource;
+
+;
+
+/**
+ * Commands for the cluster configuration
+ */
+@SuppressWarnings("unused")
+public class ExportClusterConfigurationCommand extends InternalGfshCommand {
+  private static Logger logger = LogService.getLogger();
+  public static final String XML_FILE = "xml-file";
+
+  @CliCommand(value = {CliStrings.EXPORT_SHARED_CONFIG},
+      help = CliStrings.EXPORT_SHARED_CONFIG__HELP)
+  @CliMetaData(
+      interceptor = "org.apache.geode.management.internal.cli.commands.ExportClusterConfigurationCommand$ExportInterceptor",
+      relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
+  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
+  public ResultModel exportSharedConfig(
+      @CliOption(key = GROUP,
+          specifiedDefaultValue = ConfigurationPersistenceService.CLUSTER_CONFIG,
+          unspecifiedDefaultValue = ConfigurationPersistenceService.CLUSTER_CONFIG) String group,
+      @CliOption(key = XML_FILE) String xmlFile,
+      @CliOption(key = CliStrings.EXPORT_SHARED_CONFIG__FILE,
+          help = CliStrings.EXPORT_SHARED_CONFIG__FILE__HELP) String zipFileName)
+      throws IOException {
+
+    if (!isSharedConfigurationRunning()) {
+      return ResultModel.createError("Cluster configuration service is not running.");
+    }
+
+    ResultModel result = new ResultModel();
+    if (zipFileName != null) {
+      Path tempDir = Files.createTempDirectory("temp");
+      Path exportedDir = tempDir.resolve("cluster_config");
+      Path zipFile = tempDir.resolve(FilenameUtils.getName(zipFileName));
+      InternalConfigurationPersistenceService sc = getConfigurationPersistenceService();
+      try {
+        for (Configuration config : sc.getEntireConfiguration().values()) {
+          sc.writeConfigToFile(config, exportedDir.toFile());
+        }
+        ZipUtils.zipDirectory(exportedDir, zipFile);
+        result.addFile(zipFile.toFile(), FileResultModel.FILE_TYPE_BINARY);
+      } catch (Exception e) {
+        logger.error("unable to export configuration.", e);
+      } finally {
+        FileUtils.deleteQuietly(tempDir.toFile());
+      }
+    } else {
+      Configuration configuration = getConfigurationPersistenceService().getConfiguration(group);
+      if (configuration == null) {
+        return ResultModel.createError("No cluster configuration for '" + group + "'.");
+      }
+
+      String cacheXmlContent = configuration.getCacheXmlContent();
+      if (cacheXmlContent != null) {
+        InfoResultModel xmlSection = result.addInfo("xml");
+        xmlSection.setHeader(configuration.getCacheXmlFileName() + ": ");
+        xmlSection.addLine(cacheXmlContent);
+      }
+
+      Properties gemfireProperties = configuration.getGemfireProperties();
+      if (gemfireProperties.size() > 0) {
+        DataResultModel propertySection = result.addData("properties");
+        propertySection.setHeader("Properties: ");
+        propertySection.addData(gemfireProperties);
+      }
+
+      Set<String> jarNames = configuration.getJarNames();
+      if (jarNames.size() > 0) {
+        InfoResultModel jarSection = result.addInfo("jars");
+        jarSection.setHeader("Jars: ");
+        jarSection.addLine(Strings.join(jarNames, ", "));
+      }
+    }
+
+    return result;
+  }
+
+  /**
+   * Interceptor used by gfsh to intercept execution of export shared config command at "shell".
+   */
+  public static class ExportInterceptor extends AbstractCliAroundInterceptor {
+    private String saveDirString;
+    private static final Logger logger = LogService.getLogger();
+
+    @Override
+    public ResultModel preExecution(GfshParseResult parseResult) {
+      String zip = parseResult.getParamValueAsString(CliStrings.EXPORT_SHARED_CONFIG__FILE);
+      String xmlFile = parseResult.getParamValueAsString(XML_FILE);
+      String group = parseResult.getParamValueAsString(GROUP);
+
+      if (group != null && group.contains(",")) {
+        return ResultModel.createError("Only a single group name is supported.");
+      }
+
+      if (zip != null && xmlFile != null) {
+        return ResultModel.createError("Zip file and xml File can't both be specified.");
+      }
+
+      if (zip != null && !group.equals(ConfigurationPersistenceService.CLUSTER_CONFIG)) {
+        return ResultModel.createError("zip file can not be exported with a specific group.");
+      }
+
+      if (zip != null && !zip.endsWith(".zip")) {
+        return ResultModel
+            .createError(CliStrings.format(CliStrings.INVALID_FILE_EXTENSION, ".zip"));
+      }
+
+      String exportedFile = (zip != null) ? zip : xmlFile;
+      if (exportedFile != null) {
+        // make sure the file does not exist so that we don't overwrite some existing file
+        File file = new File(exportedFile).getAbsoluteFile();
+        if (file.exists()) {
+          String message = file.getAbsolutePath() + " already exists. Overwrite it? ";
+          if (readYesNo(message, Response.YES) == Response.NO) {
+            return ResultModel.createError("Aborted. " + exportedFile + " already exists.");
+          }
+        }
+      }
+
+      return ResultModel.createInfo("");
+    }
+
+    @Override
+    public ResultModel postExecution(GfshParseResult parseResult, ResultModel result, Path tempFile)
+        throws IOException {
+      if (result.getStatus() == Result.Status.ERROR) {
+        return result;
+      }
+      String xmlFile = parseResult.getParamValueAsString(XML_FILE);
+      String zipFile = parseResult.getParamValueAsString(CliStrings.EXPORT_SHARED_CONFIG__FILE);
+      String group = parseResult.getParamValueAsString(GROUP);
+      // save the result to the file
+      if (xmlFile != null) {
+        InfoResultModel xmlSection = result.getInfoSection("xml");
+        if (xmlSection == null) {
+          InfoResultModel info = result.addInfo("info");
+          info.addLine(String.format("xml content is empty. %s is not created.", xmlFile));
+        } else {
+          File file = new File(xmlFile).getAbsoluteFile();
+          FileUtils.write(file, Strings.join(xmlSection.getContent(), System.lineSeparator()),
+              Charset.defaultCharset());
+          xmlSection.removeLine(0);
+          xmlSection.addLine("xml content exported to " + file.getAbsolutePath());
+        }
+      } else if (zipFile != null) {
+        // delete the existing file since at this point, user is OK to replace the old zip.
+        File file = new File(zipFile);
+        if (file.exists()) {
+          FileUtils.deleteQuietly(file);
+        }
+        FileResultModel fileResultModel = result.getFiles().values().iterator().next();
+        fileResultModel.writeFile(file.getParentFile().getAbsolutePath());
+      }
+      return result;
+    }
+  }
+}
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportConfigCommand.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportConfigCommand.java
index 7e10fe3..6e80a6b 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportConfigCommand.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportConfigCommand.java
@@ -34,7 +34,6 @@ import org.apache.geode.management.internal.cli.GfshParseResult;
 import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
 import org.apache.geode.management.internal.cli.functions.ExportConfigFunction;
 import org.apache.geode.management.internal.cli.i18n.CliStrings;
-import org.apache.geode.management.internal.cli.result.ResultData;
 import org.apache.geode.management.internal.cli.result.model.FileResultModel;
 import org.apache.geode.management.internal.cli.result.model.InfoResultModel;
 import org.apache.geode.management.internal.cli.result.model.ResultModel;
@@ -88,10 +87,8 @@ public class ExportConfigCommand extends InternalGfshCommand {
         String cacheFileName = result.getMemberIdOrName() + "-cache.xml";
         String propsFileName = result.getMemberIdOrName() + "-gf.properties";
         String[] fileContent = (String[]) result.getSerializables();
-        crm.addFile(cacheFileName, fileContent[0].getBytes(), ResultData.FILE_TYPE_TEXT,
-            "Downloading Cache XML file: ");
-        crm.addFile(propsFileName, fileContent[1].getBytes(), ResultData.FILE_TYPE_TEXT,
-            "Downloading properties file: ");
+        crm.addFile(cacheFileName, fileContent[0], "Downloading Cache XML file: " + cacheFileName);
+        crm.addFile(propsFileName, fileContent[1], "Downloading properties file: " + propsFileName);
       }
     }
 
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportClusterConfigurationCommands.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportClusterConfigurationCommands.java
deleted file mode 100644
index 41e973d..0000000
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportClusterConfigurationCommands.java
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.geode.management.internal.cli.commands;
-
-import static java.util.stream.Collectors.joining;
-import static java.util.stream.Collectors.toSet;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Collection;
-import java.util.List;
-import java.util.Set;
-
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.TransformerException;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.logging.log4j.Logger;
-import org.springframework.shell.core.annotation.CliCommand;
-import org.springframework.shell.core.annotation.CliOption;
-import org.xml.sax.SAXException;
-
-import org.apache.geode.cache.configuration.CacheConfig;
-import org.apache.geode.cache.execute.ResultCollector;
-import org.apache.geode.distributed.DistributedMember;
-import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
-import org.apache.geode.distributed.internal.InternalLocator;
-import org.apache.geode.internal.logging.LogService;
-import org.apache.geode.management.cli.CliMetaData;
-import org.apache.geode.management.cli.Result;
-import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
-import org.apache.geode.management.internal.cli.GfshParseResult;
-import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
-import org.apache.geode.management.internal.cli.i18n.CliStrings;
-import org.apache.geode.management.internal.cli.remote.CommandExecutionContext;
-import org.apache.geode.management.internal.cli.result.CommandResult;
-import org.apache.geode.management.internal.cli.result.ErrorResultData;
-import org.apache.geode.management.internal.cli.result.FileResult;
-import org.apache.geode.management.internal.cli.result.InfoResultData;
-import org.apache.geode.management.internal.cli.result.ResultBuilder;
-import org.apache.geode.management.internal.cli.result.ResultData;
-import org.apache.geode.management.internal.cli.shell.Gfsh;
-import org.apache.geode.management.internal.configuration.domain.Configuration;
-import org.apache.geode.management.internal.configuration.functions.GetRegionNamesFunction;
-import org.apache.geode.management.internal.configuration.functions.RecreateCacheFunction;
-import org.apache.geode.management.internal.configuration.utils.ZipUtils;
-import org.apache.geode.management.internal.security.ResourceOperation;
-import org.apache.geode.security.ResourcePermission.Operation;
-import org.apache.geode.security.ResourcePermission.Resource;
-
-/**
- * Commands for the cluster configuration
- */
-@SuppressWarnings("unused")
-public class ExportImportClusterConfigurationCommands extends InternalGfshCommand {
-
-  @CliCommand(value = {CliStrings.EXPORT_SHARED_CONFIG},
-      help = CliStrings.EXPORT_SHARED_CONFIG__HELP)
-  @CliMetaData(
-      interceptor = "org.apache.geode.management.internal.cli.commands.ExportImportClusterConfigurationCommands$ExportInterceptor",
-      relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
-  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
-  public Result exportSharedConfig(@CliOption(key = {CliStrings.EXPORT_SHARED_CONFIG__FILE},
-      mandatory = true, help = CliStrings.EXPORT_SHARED_CONFIG__FILE__HELP) String zipFileName) {
-
-    InternalLocator locator = InternalLocator.getLocator();
-    if (locator == null || !locator.isSharedConfigurationRunning()) {
-      return ResultBuilder.createGemFireErrorResult(CliStrings.SHARED_CONFIGURATION_NOT_STARTED);
-    }
-
-    Path tempDir;
-    try {
-      tempDir = Files.createTempDirectory("clusterConfig");
-    } catch (IOException e) {
-      if (Gfsh.getCurrentInstance() != null) {
-        Gfsh.getCurrentInstance().logSevere(e.getMessage(), e);
-      }
-      ErrorResultData errorData =
-          ResultBuilder.createErrorResultData().addLine("Unable to create temp directory");
-      return ResultBuilder.buildResult(errorData);
-    }
-
-    File zipFile = tempDir.resolve("exportedCC.zip").toFile();
-    InternalConfigurationPersistenceService sc = locator.getConfigurationPersistenceService();
-
-    Result result;
-    try {
-      for (Configuration config : sc.getConfigurationRegion().values()) {
-        sc.writeConfigToFile(config);
-      }
-      ZipUtils.zipDirectory(sc.getSharedConfigurationDirPath(), zipFile.getCanonicalPath());
-
-      InfoResultData infoData = ResultBuilder.createInfoResultData();
-      byte[] byteData = FileUtils.readFileToByteArray(zipFile);
-      infoData.addAsFile(zipFileName, byteData, ResultData.FILE_TYPE_BINARY,
-          CliStrings.EXPORT_SHARED_CONFIG__DOWNLOAD__MSG, false);
-      result = ResultBuilder.buildResult(infoData);
-    } catch (Exception e) {
-      ErrorResultData errorData = ResultBuilder.createErrorResultData();
-      errorData.addLine("Export failed");
-      if (Gfsh.getCurrentInstance() != null) {
-        Gfsh.getCurrentInstance().logSevere(e.getMessage(), e);
-      }
-      result = ResultBuilder.buildResult(errorData);
-    } finally {
-      zipFile.delete();
-    }
-
-    return result;
-  }
-
-  @CliCommand(value = {CliStrings.IMPORT_SHARED_CONFIG},
-      help = CliStrings.IMPORT_SHARED_CONFIG__HELP)
-  @CliMetaData(
-      interceptor = "org.apache.geode.management.internal.cli.commands.ExportImportClusterConfigurationCommands$ImportInterceptor",
-      isFileUploaded = true, relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
-  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.MANAGE)
-  @SuppressWarnings("unchecked")
-  public Result importSharedConfig(
-      @CliOption(key = {CliStrings.IMPORT_SHARED_CONFIG__ZIP}, mandatory = true,
-          help = CliStrings.IMPORT_SHARED_CONFIG__ZIP__HELP) String zip)
-      throws IOException, TransformerException, SAXException, ParserConfigurationException {
-
-    InternalConfigurationPersistenceService sc =
-        (InternalConfigurationPersistenceService) getConfigurationPersistenceService();
-
-    if (sc == null) {
-      return ResultBuilder.createGemFireErrorResult(CliStrings.SHARED_CONFIGURATION_NOT_STARTED);
-    }
-
-    Set<DistributedMember> servers = getAllNormalMembers();
-
-    // check if running servers are vanilla servers
-    if (servers.size() > 0) {
-      Set<String> groupNames = sc.getConfigurationRegion().keySet();
-      for (String groupName : groupNames) {
-        CacheConfig cacheConfig = sc.getCacheConfig(groupName);
-        if (cacheConfig != null) {
-          if (cacheConfig.getRegions().size() > 0 || cacheConfig.getAsyncEventQueues().size() > 0
-              || cacheConfig.getDiskStores().size() > 0
-              || cacheConfig.getCustomCacheElements().size() > 0
-              || cacheConfig.getJndiBindings().size() > 0
-              || cacheConfig.getGatewayReceiver() != null
-              || cacheConfig.getGatewaySenders().size() > 0) {
-            return ResultBuilder.createGemFireErrorResult(
-                "Running servers have existing cluster configuration applied already.");
-          }
-        }
-      }
-
-      // further checks in case any servers has regions not defined by the cluster configuration to
-      // avoid data loss.
-      Set<String> serverRegionNames = servers.stream().map(this::getRegionNamesOnServer)
-          .flatMap(Collection::stream).collect(toSet());
-
-      if (!serverRegionNames.isEmpty()) {
-        return ResultBuilder
-            .createGemFireErrorResult("Cannot import cluster configuration with existing regions: "
-                + serverRegionNames.stream().collect(joining(",")));
-      }
-    }
-
-    List<String> filePathFromShell = CommandExecutionContext.getFilePathFromShell();
-
-    Result result;
-    InfoResultData infoData = ResultBuilder.createInfoResultData();
-    String zipFilePath = filePathFromShell.get(0);
-
-    // backup the old config
-    for (Configuration config : sc.getConfigurationRegion().values()) {
-      sc.writeConfigToFile(config);
-    }
-    sc.renameExistingSharedConfigDirectory();
-
-    ZipUtils.unzip(zipFilePath, sc.getSharedConfigurationDirPath());
-
-    // load it from the disk
-    sc.loadSharedConfigurationFromDisk();
-    infoData.addLine(CliStrings.IMPORT_SHARED_CONFIG__SUCCESS__MSG);
-
-    // Bounce the cache of each member
-    if (servers.size() > 0) {
-      List<CliFunctionResult> functionResults =
-          executeAndGetFunctionResult(new RecreateCacheFunction(), null, servers);
-
-      for (CliFunctionResult functionResult : functionResults) {
-        if (functionResult.isSuccessful()) {
-          infoData.addLine("Successfully applied the imported cluster configuration on "
-              + functionResult.getMemberIdOrName());
-        } else {
-          infoData.addLine("Failed to apply the imported cluster configuration on "
-              + functionResult.getMemberIdOrName() + " due to " + functionResult.getMessage());
-        }
-      }
-    }
-
-    result = ResultBuilder.buildResult(infoData);
-    return result;
-  }
-
-  private Set<String> getRegionNamesOnServer(DistributedMember server) {
-    ResultCollector rc = executeFunction(new GetRegionNamesFunction(), null, server);
-    List<Set<String>> results = (List<Set<String>>) rc.getResult();
-
-    return results.get(0);
-  }
-
-  /**
-   * Interceptor used by gfsh to intercept execution of export shared config command at "shell".
-   */
-  public static class ExportInterceptor extends AbstractCliAroundInterceptor {
-    private String saveDirString;
-    private static final Logger logger = LogService.getLogger();
-
-    @Override
-    public Result preExecution(GfshParseResult parseResult) {
-      String zip = parseResult.getParamValueAsString(CliStrings.EXPORT_SHARED_CONFIG__FILE);
-
-      if (!zip.endsWith(".zip")) {
-        return ResultBuilder
-            .createUserErrorResult(CliStrings.format(CliStrings.INVALID_FILE_EXTENSION, ".zip"));
-      }
-      return ResultBuilder.createInfoResult("OK");
-    }
-
-    @Override
-    public CommandResult postExecution(GfshParseResult parseResult, CommandResult commandResult,
-        Path tempFile) {
-      if (commandResult.hasIncomingFiles()) {
-        try {
-          commandResult.saveIncomingFiles(System.getProperty("user.dir"));
-          return commandResult;
-        } catch (IOException ioex) {
-          logger.error(ioex);
-          return ResultBuilder.createShellClientErrorResult(
-              CliStrings.EXPORT_SHARED_CONFIG__UNABLE__TO__EXPORT__CONFIG + ": "
-                  + ioex.getMessage());
-        }
-      }
-      return null;
-    }
-  }
-
-  public static class ImportInterceptor extends AbstractCliAroundInterceptor {
-
-    public Result preExecution(GfshParseResult parseResult) {
-      String zip = parseResult.getParamValueAsString(CliStrings.IMPORT_SHARED_CONFIG__ZIP);
-
-      zip = StringUtils.trim(zip);
-
-      if (zip == null) {
-        return ResultBuilder.createUserErrorResult(CliStrings.format(
-            CliStrings.IMPORT_SHARED_CONFIG__PROVIDE__ZIP, CliStrings.IMPORT_SHARED_CONFIG__ZIP));
-      }
-      if (!zip.endsWith(CliStrings.ZIP_FILE_EXTENSION)) {
-        return ResultBuilder.createUserErrorResult(
-            CliStrings.format(CliStrings.INVALID_FILE_EXTENSION, CliStrings.ZIP_FILE_EXTENSION));
-      }
-
-      FileResult fileResult = new FileResult();
-
-      File zipFile = new File(zip);
-      if (!zipFile.exists()) {
-        return ResultBuilder.createUserErrorResult(zip + " not found");
-      }
-
-      fileResult.addFile(zipFile);
-
-      return fileResult;
-    }
-  }
-
-}
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommand.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommand.java
new file mode 100644
index 0000000..a7256a0
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommand.java
@@ -0,0 +1,266 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.cli.commands;
+
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toSet;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.text.SimpleDateFormat;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.Set;
+
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.TransformerException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.logging.log4j.Logger;
+import org.springframework.shell.core.annotation.CliCommand;
+import org.springframework.shell.core.annotation.CliOption;
+import org.xml.sax.SAXException;
+
+import org.apache.geode.cache.execute.ResultCollector;
+import org.apache.geode.distributed.ConfigurationPersistenceService;
+import org.apache.geode.distributed.DistributedMember;
+import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.management.cli.CliMetaData;
+import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
+import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.remote.CommandExecutionContext;
+import org.apache.geode.management.internal.cli.result.FileResult;
+import org.apache.geode.management.internal.cli.result.model.InfoResultModel;
+import org.apache.geode.management.internal.cli.result.model.ResultModel;
+import org.apache.geode.management.internal.cli.result.model.TabularResultModel;
+import org.apache.geode.management.internal.configuration.domain.Configuration;
+import org.apache.geode.management.internal.configuration.functions.GetRegionNamesFunction;
+import org.apache.geode.management.internal.configuration.functions.RecreateCacheFunction;
+import org.apache.geode.management.internal.configuration.utils.ZipUtils;
+import org.apache.geode.management.internal.security.ResourceOperation;
+import org.apache.geode.security.ResourcePermission.Operation;
+import org.apache.geode.security.ResourcePermission.Resource;
+
+/**
+ * Command for importing the cluster configuration from an exported zip file or a cache xml file.
+ */
+@SuppressWarnings("unused")
+public class ImportClusterConfigurationCommand extends InternalGfshCommand {
+  public static Logger logger = LogService.getLogger();
+  public static final String XML_FILE = "xml-file";
+  public static final String ACTION = "action";
+  public static final String ACTION_HELP =
+      "What to do with the running servers if any. APPLY would try to apply the configuration to the empty servers. STAGE would leave the running servers alone.";
+
+  public enum Action {
+    APPLY, STAGE
+  };
+
+  @CliCommand(value = {CliStrings.IMPORT_SHARED_CONFIG},
+      help = CliStrings.IMPORT_SHARED_CONFIG__HELP)
+  @CliMetaData(
+      interceptor = "org.apache.geode.management.internal.cli.commands.ImportClusterConfigurationCommand$ImportInterceptor",
+      isFileUploaded = true, relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
+  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.MANAGE)
+  @SuppressWarnings("unchecked")
+  public ResultModel importSharedConfig(
+      @CliOption(key = CliStrings.GROUP,
+          specifiedDefaultValue = ConfigurationPersistenceService.CLUSTER_CONFIG,
+          unspecifiedDefaultValue = ConfigurationPersistenceService.CLUSTER_CONFIG) String group,
+      @CliOption(key = XML_FILE) String xmlFile,
+      @CliOption(key = ACTION, help = ACTION_HELP, unspecifiedDefaultValue = "APPLY") Action action,
+      @CliOption(key = {CliStrings.IMPORT_SHARED_CONFIG__ZIP},
+          help = CliStrings.IMPORT_SHARED_CONFIG__ZIP__HELP) String zip)
+      throws IOException, TransformerException, SAXException, ParserConfigurationException {
+
+    if (!isSharedConfigurationRunning()) {
+      return ResultModel.createError("Cluster configuration service is not running.");
+    }
+
+    InternalConfigurationPersistenceService ccService = getConfigurationPersistenceService();
+    Set<DistributedMember> servers = findMembers(group);
+    File file = getUploadedFile();
+
+    ResultModel result = new ResultModel();
+    InfoResultModel infoSection = result.addInfo(ResultModel.INFO_SECTION);
+    ccService.lockSharedConfiguration();
+    try {
+      if (action == Action.APPLY && servers.size() > 0) {
+        // make sure the servers are vanilla servers and users haven't done anything on them.
+        // a server might belong to multiple groups, so we can't just check that one group's
+        // xml is null; we have to make sure
+        // all groups' xml are null
+        if (ccService.hasXmlConfiguration()) {
+          return ResultModel.createError("Can not configure servers that are already configured.");
+        }
+        // if no existing cluster configuration, to be safe, further check to see if running
+        // servers have regions already defined
+        Set<String> regionsOnServers = servers.stream().map(this::getRegionNamesOnServer)
+            .flatMap(Collection::stream).collect(toSet());
+
+        if (!regionsOnServers.isEmpty()) {
+          return ResultModel.createError("Can not configure servers with existing regions: "
+              + regionsOnServers.stream().collect(joining(",")));
+        }
+      }
+
+      // backup the old config
+      backupTheOldConfig(ccService);
+
+      if (zip != null) {
+        Path tempDir = Files.createTempDirectory("config");
+        ZipUtils.unzip(file.getAbsolutePath(), tempDir.toAbsolutePath().toString());
+        // load it from the disk
+        ccService.loadSharedConfigurationFromDir(tempDir.toFile());
+        FileUtils.deleteQuietly(tempDir.toFile());
+        infoSection.addLine("Cluster configuration successfully imported.");
+      } else {
+        // update the xml in the cluster configuration service
+        Configuration configuration = ccService.getConfiguration(group);
+        if (configuration == null) {
+          configuration = new Configuration(group);
+        }
+        configuration.setCacheXmlFile(file);
+        ccService.setConfiguration(group, configuration);
+        logger.info(
+            configuration.getConfigName() + " xml content: \n" + configuration.getCacheXmlContent());
+        infoSection.addLine(
+            "Successfully set the '" + group + "' configuration to the content of " + xmlFile);
+      }
+    } finally {
+      FileUtils.deleteQuietly(file);
+      ccService.unlockSharedConfiguration();
+    }
+
+    if (servers.size() > 0) {
+      if (action == Action.APPLY) {
+        List<CliFunctionResult> functionResults =
+            executeAndGetFunctionResult(new RecreateCacheFunction(), null, servers);
+        TabularResultModel tableSection =
+            result.addTableAndSetStatus(ResultModel.MEMBER_STATUS_SECTION, functionResults, false,
+                true);
+        tableSection.setHeader("Configure the servers in '" + group + "' group: ");
+      } else {
+        infoSection.addLine("Existing servers are not affected with this configuration change.");
+      }
+    }
+    return result;
+  }
+
+  void backupTheOldConfig(InternalConfigurationPersistenceService ccService) throws IOException {
+    String backupDir = "cluster_config_" + new SimpleDateFormat("yyyyMMddhhmm").format(new Date())
+        + '.' + System.nanoTime();
+    File backDirFile = ccService.getClusterConfigDirPath().getParent().resolve(backupDir).toFile();
+    for (Configuration config : ccService.getEntireConfiguration().values()) {
+      ccService.writeConfigToFile(config, backDirFile);
+    }
+  }
+
+  File getUploadedFile() {
+    List<String> filePathFromShell = CommandExecutionContext.getFilePathFromShell();
+    File file = new File(filePathFromShell.get(0));
+    return file;
+  }
+
+  Set<DistributedMember> findMembers(String group) {
+    Set<DistributedMember> serversInGroup;
+    if (ConfigurationPersistenceService.CLUSTER_CONFIG.equals(group)) {
+      serversInGroup = getAllNormalMembers();
+    } else {
+      serversInGroup = findMembers(new String[] {group}, null);
+    }
+    return serversInGroup;
+  }
+
+  private Set<String> getRegionNamesOnServer(DistributedMember server) {
+    ResultCollector rc = executeFunction(new GetRegionNamesFunction(), null, server);
+    List<Set<String>> results = (List<Set<String>>) rc.getResult();
+
+    return results.get(0);
+  }
+
+  public static class ImportInterceptor extends AbstractCliAroundInterceptor {
+    public Object preExecution(GfshParseResult parseResult) {
+      String zip = parseResult.getParamValueAsString(CliStrings.IMPORT_SHARED_CONFIG__ZIP);
+      String xmlFile = parseResult.getParamValueAsString(XML_FILE);
+      String group = parseResult.getParamValueAsString(CliStrings.GROUP);
+
+      if (group != null && group.contains(",")) {
+        return ResultModel.createError("Only a single group name is supported.");
+      }
+
+      if (zip == null && xmlFile == null) {
+        return ResultModel.createError("Either a zip file or a xml file is required.");
+      }
+
+      if (zip != null && xmlFile != null) {
+        return ResultModel.createError("Zip file and xml File can't both be specified.");
+      }
+
+
+      if (zip != null) {
+        if (!group.equals(ConfigurationPersistenceService.CLUSTER_CONFIG)) {
+          return ResultModel.createError("zip file can not be imported with a specific group.");
+        }
+
+        if (!zip.endsWith(CliStrings.ZIP_FILE_EXTENSION)) {
+          return ResultModel.createError("Invalid file type. The file extension must be .zip");
+        }
+      }
+
+      if (xmlFile != null) {
+        if (!xmlFile.endsWith(".xml")) {
+          return ResultModel.createError("Invalid file type. The file extension must be .xml.");
+        }
+      }
+
+      String file = (zip != null) ? zip : xmlFile;
+      File importedFile = new File(file).getAbsoluteFile();
+      if (!importedFile.exists()) {
+        return ResultModel.createError("'" + file + "' not found.");
+      }
+
+      String message = "This command will replace the existing cluster configuration, if any, "
+          + "The old configuration will be backed up in the working directory.\n\n" + "Continue? ";
+
+      if (readYesNo(message, Response.YES) == Response.NO) {
+        return ResultModel.createError("Aborted import of " + file + ".");
+      }
+
+      Action action = (Action) parseResult.getParamValue(ACTION);
+      if (action == Action.STAGE) {
+        message =
+            "The configuration you are trying to import should NOT have any conflict with the configuration"
+                + "of existing running servers if any, otherwise you may not be able to start new servers. "
+                + "\nIt is also expected that you would restart the servers with the old configuration after new servers have come up."
+                + "\n\nContinue? ";
+        if (readYesNo(message, Response.YES) == Response.NO) {
+          return ResultModel.createError("Aborted import of " + xmlFile + ".");
+        }
+      }
+
+      FileResult result = new FileResult();
+      result.addFile(importedFile);
+      return result;
+    }
+  }
+
+}
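
A rough usage sketch for the command above, assuming the command name resolves to
"import cluster-configuration" and using the option names defined in this class
(--xml-file, --action, --group) together with the existing --zip-file-name option;
the file paths and group name below are invented for illustration:

    gfsh> import cluster-configuration --xml-file=/tmp/group1.xml --group=group1 --action=STAGE
    gfsh> import cluster-configuration --zip-file-name=/tmp/cluster_config.zip
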
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/InternalGfshCommand.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/InternalGfshCommand.java
index 62f117d..080b148 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/InternalGfshCommand.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/InternalGfshCommand.java
@@ -17,6 +17,7 @@ package org.apache.geode.management.internal.cli.commands;
 import java.util.List;
 import java.util.Objects;
 
+import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
 import org.apache.geode.management.ManagementService;
 import org.apache.geode.management.cli.GfshCommand;
 import org.apache.geode.management.cli.Result;
@@ -66,4 +67,10 @@ public abstract class InternalGfshCommand extends GfshCommand {
   public ManagementService getManagementService() {
     return ManagementService.getExistingManagementService(getCache());
   }
+
+  @Override
+  public InternalConfigurationPersistenceService getConfigurationPersistenceService() {
+    return (InternalConfigurationPersistenceService) super.getConfigurationPersistenceService();
+  }
+
 }
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/ModelCommandResult.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/ModelCommandResult.java
index 36387ac..106401e 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/ModelCommandResult.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/ModelCommandResult.java
@@ -15,6 +15,8 @@
 
 package org.apache.geode.management.internal.cli.result;
 
+import static org.apache.commons.lang.SystemUtils.LINE_SEPARATOR;
+
 import java.io.IOException;
 import java.nio.file.Path;
 import java.util.ArrayList;
@@ -234,7 +236,10 @@ public class ModelCommandResult implements CommandResult {
 
     addSpacedRowInTable(resultTable, result.getHeader());
 
+    int index = 0;
+    int sectionSize = result.getContent().size();
     for (AbstractResultModel section : result.getContent().values()) {
+      index++;
       if (section instanceof DataResultModel) {
         buildData(resultTable, (DataResultModel) section);
       } else if (section instanceof TabularResultModel) {
@@ -245,6 +250,10 @@ public class ModelCommandResult implements CommandResult {
         throw new IllegalArgumentException(
             "Unable to process output for " + section.getClass().getName());
       }
+      // only add the spacer in between the sections.
+      if (index < sectionSize) {
+        addSpacedRowInTable(resultTable, LINE_SEPARATOR);
+      }
     }
 
     addSpacedRowInTable(resultTable, result.getFooter());
@@ -340,6 +349,10 @@ public class ModelCommandResult implements CommandResult {
       InfoResultModel model) {
     TableBuilder.RowGroup rowGroup = resultTable.newRowGroup();
 
+    addRowInRowGroup(rowGroup, model.getHeader());
+
     model.getContent().forEach(c -> rowGroup.newRow().newLeftCol(c));
+
+    addRowInRowGroup(rowGroup, model.getFooter());
   }
 }
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/DataResultModel.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/DataResultModel.java
index e3a9f0a..06889d2 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/DataResultModel.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/DataResultModel.java
@@ -17,6 +17,7 @@ package org.apache.geode.management.internal.cli.result.model;
 
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Properties;
 
 public class DataResultModel extends AbstractResultModel {
 
@@ -37,4 +38,8 @@ public class DataResultModel extends AbstractResultModel {
     data.put(key, value != null ? value.toString() : "");
   }
 
+  public void addData(Properties gemfireProperties) {
+    gemfireProperties.stringPropertyNames()
+        .forEach(k -> addData(k, gemfireProperties.getProperty(k)));
+  }
 }
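
A minimal sketch of how the new Properties overload might be used when assembling a
command result; the section name and property values are invented for illustration, and
the usual java.util.Properties / ResultModel / DataResultModel imports are assumed:

    // illustrative only: section name and property values are invented for this example
    Properties props = new Properties();
    props.setProperty("locators", "localhost[10334]");
    props.setProperty("log-level", "config");

    ResultModel result = new ResultModel();
    DataResultModel dataSection = result.addData("properties");
    dataSection.addData(props); // each string property becomes a key/value entry in the section
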
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/FileResultModel.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/FileResultModel.java
index 0712a14..ca3474f 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/FileResultModel.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/FileResultModel.java
@@ -22,11 +22,15 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.text.MessageFormat;
 
+import org.apache.commons.io.FileUtils;
+
 import org.apache.geode.management.internal.cli.i18n.CliStrings;
 import org.apache.geode.management.internal.cli.result.ResultData;
 import org.apache.geode.management.internal.cli.shell.Gfsh;
 
 public class FileResultModel {
+  public static final int FILE_TYPE_BINARY = 0;
+  public static final int FILE_TYPE_TEXT = 1;
 
   private String filename;
   private int type;
@@ -36,14 +40,29 @@ public class FileResultModel {
 
   public FileResultModel() {}
 
-  public FileResultModel(String fileName, byte[] data, int fileType, String message) {
+  public FileResultModel(String fileName, String content, String message) {
     this.filename = fileName;
-    this.data = data;
+    this.data = content.getBytes();
     this.length = data.length;
-    this.type = fileType;
+    this.type = FILE_TYPE_TEXT;
     this.message = message;
   }
 
+  public FileResultModel(File file, int fileType) {
+    if (fileType != FILE_TYPE_BINARY && fileType != FILE_TYPE_TEXT) {
+      throw new IllegalArgumentException("Unsupported file type is specified.");
+    }
+
+    this.filename = file.getName();
+    try {
+      this.data = FileUtils.readFileToByteArray(file);
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to read file: " + file.getAbsolutePath(), e);
+    }
+    this.length = data.length;
+    this.type = fileType;
+  }
+
   public String getFilename() {
     return filename;
   }
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/InfoResultModel.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/InfoResultModel.java
index 09fb6dd..8c4a0d8 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/InfoResultModel.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/InfoResultModel.java
@@ -40,4 +40,8 @@ public class InfoResultModel extends AbstractResultModel {
     return this;
   }
 
+  public InfoResultModel removeLine(int index) {
+    messages.remove(index);
+    return this;
+  }
 }
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/ResultModel.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/ResultModel.java
index fc4785a..9a20b62 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/ResultModel.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/result/model/ResultModel.java
@@ -15,10 +15,8 @@
 
 package org.apache.geode.management.internal.cli.result.model;
 
-import static org.apache.geode.management.internal.cli.result.AbstractResultData.FILE_TYPE_BINARY;
-import static org.apache.geode.management.internal.cli.result.AbstractResultData.FILE_TYPE_TEXT;
-
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
@@ -148,16 +146,14 @@ public class ResultModel {
     this.files = files;
   }
 
-  public void addFile(String fileName, byte[] data, int fileType, String message) {
-    if (fileType != FILE_TYPE_BINARY && fileType != FILE_TYPE_TEXT) {
-      throw new IllegalArgumentException("Unsupported file type is specified.");
-    }
-
-    FileResultModel fileModel = new FileResultModel(fileName, data, fileType, message + fileName);
+  public void addFile(String fileName, String content, String message) {
+    FileResultModel fileModel = new FileResultModel(fileName, content, message);
     files.put(fileName, fileModel);
   }
 
-
+  public void addFile(File file, int fileType) {
+    files.put(file.getName(), new FileResultModel(file, fileType));
+  }
 
   /**
    * Overloaded method to create an {@code InfoResultModel} section called "info".
@@ -189,10 +185,6 @@ public class ResultModel {
         .map(InfoResultModel.class::cast).collect(Collectors.toList());
   }
 
-  public InfoResultModel getInfoSection(String name) {
-    return (InfoResultModel) sections.get(name);
-  }
-
   public TabularResultModel addTable(String namedSection) {
     Object model = sections.get(namedSection);
     if (model != null) {
@@ -253,7 +245,15 @@ public class ResultModel {
   }
 
   public TabularResultModel getTableSection(String name) {
-    return (TabularResultModel) sections.get(name);
+    return (TabularResultModel) getSection(name);
+  }
+
+  public InfoResultModel getInfoSection(String name) {
+    return (InfoResultModel) getSection(name);
+  }
+
+  public AbstractResultModel getSection(String name) {
+    return sections.get(name);
   }
 
   public DataResultModel addData(String namedSection) {
@@ -280,7 +280,7 @@ public class ResultModel {
   }
 
   public DataResultModel getDataSection(String name) {
-    return (DataResultModel) sections.get(name);
+    return (DataResultModel) getSection(name);
   }
 
   public List<String> getSectionNames() {
@@ -383,4 +383,5 @@ public class ResultModel {
     tabularResultModel.setFooter(footer);
     return result;
   }
+
 }
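
A hedged sketch of how a command could attach a text file to its result using the new
addFile(File, int) overload; the file path is invented and the usual File / ResultModel /
FileResultModel imports are assumed:

    // illustrative only: the path is made up for this example
    File exportedXml = new File("/tmp/cluster.xml");
    ResultModel result = new ResultModel();
    result.addInfo(ResultModel.INFO_SECTION).addLine("Exported configuration to " + exportedXml.getName());
    // the FileResultModel constructor reads the file into memory, so the bytes travel back to gfsh
    result.addFile(exportedXml, FileResultModel.FILE_TYPE_TEXT);
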
diff --git a/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt b/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt
index 929dbb1..631ebfb 100644
--- a/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt
+++ b/geode-core/src/main/resources/org/apache/geode/internal/sanctioned-geode-core-serializables.txt
@@ -494,6 +494,7 @@ org/apache/geode/management/internal/beans/stats/StatType,false
 org/apache/geode/management/internal/cli/AbstractCliAroundInterceptor$Response,false,text:java/lang/String
 org/apache/geode/management/internal/cli/CliUtil$DeflaterInflaterData,true,1104813333595216795,data:byte[],dataLength:int
 org/apache/geode/management/internal/cli/commands/CreateJndiBindingCommand$DATASOURCE_TYPE,false,type:java/lang/String
+org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommand$Action,false
 org/apache/geode/management/internal/cli/domain/AsyncEventQueueDetails,true,1,batchSize:int,diskStoreName:java/lang/String,id:java/lang/String,listener:java/lang/String,listenerProperties:java/util/Properties,maxQueueMemory:int,persistent:boolean
 org/apache/geode/management/internal/cli/domain/CacheServerInfo,true,1,bindAddress:java/lang/String,isRunning:boolean,port:int
 org/apache/geode/management/internal/cli/domain/ClassName,true,1,className:java/lang/String,initProperties:java/util/Properties
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommandTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommandTest.java
new file mode 100644
index 0000000..9687e84
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportClusterConfigurationCommandTest.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.cli.commands;
+
+import static org.apache.geode.management.internal.cli.i18n.CliStrings.EXPORT_SHARED_CONFIG;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Properties;
+
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+
+import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.configuration.domain.Configuration;
+import org.apache.geode.test.junit.rules.GfshParserRule;
+
+
+public class ExportClusterConfigurationCommandTest {
+  private static String CLUSTER_XML =
+      "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n"
+          + "<cache xmlns=\"http://geode.apache.org/schema/cache\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" copy-on-read=\"false\" is-server=\"false\" lock-lease=\"120\" lock-timeout=\"60\" search-timeout=\"300\" version=\"1.0\" xsi:schemaLocation=\"http://geode.apache.org/schema/cache http://geode.apache.org/schema/cache/cache-1.0.xsd\">\n"
+          + "<region name=\"regionForCluster\">\n"
+          + "    <region-attributes data-policy=\"replicate\" scope=\"distributed-ack\"/>\n"
+          + "  </region>\n" + "</cache>\n";
+
+  @ClassRule
+  public static GfshParserRule gfsh = new GfshParserRule();
+
+  private ExportClusterConfigurationCommand command;
+  private InternalConfigurationPersistenceService ccService;
+  private Configuration configuration;
+
+  @Before
+  public void setUp() throws Exception {
+    ccService = mock(InternalConfigurationPersistenceService.class);
+    command = spy(ExportClusterConfigurationCommand.class);
+    doReturn(true).when(command).isSharedConfigurationRunning();
+    doReturn(ccService).when(command).getConfigurationPersistenceService();
+    configuration = new Configuration("cluster");
+  }
+
+  @Test
+  public void checkDefaultValue() {
+    GfshParseResult parseResult = gfsh.parse(EXPORT_SHARED_CONFIG + " --xml-file=my.xml");
+    assertThat(parseResult.getParamValue("group")).isEqualTo("cluster");
+    assertThat(parseResult.getParamValue("xml-file")).isEqualTo("my.xml");
+
+    parseResult = gfsh.parse(EXPORT_SHARED_CONFIG + " --group=''");
+    assertThat(parseResult.getParamValue("group")).isEqualTo("cluster");
+  }
+
+  @Test
+  public void preValidation() {
+    gfsh.executeAndAssertThat(command, EXPORT_SHARED_CONFIG + " --group='group1,group2'")
+        .statusIsError().containsOutput("Only a single group name is supported");
+
+    gfsh.executeAndAssertThat(command,
+        EXPORT_SHARED_CONFIG + " --zip-file-name=b.zip --xml-file=ab.xml").statusIsError()
+        .containsOutput("Zip file and xml File can't both be specified");
+
+    gfsh.executeAndAssertThat(command,
+        EXPORT_SHARED_CONFIG + " --zip-file-name=b.zip --group=group1").statusIsError()
+        .containsOutput("zip file can not be exported with a specific group");
+
+    gfsh.executeAndAssertThat(command, EXPORT_SHARED_CONFIG + " --zip-file-name=b.zip")
+        .statusIsSuccess();
+  }
+
+  @Test
+  public void clusterConfigurationNotRunning() {
+    doReturn(false).when(command).isSharedConfigurationRunning();
+
+    gfsh.executeAndAssertThat(command, EXPORT_SHARED_CONFIG).statusIsError()
+        .containsOutput("Cluster configuration service is not running");
+  }
+
+  @Test
+  public void groupNotExist() {
+    when(ccService.getConfiguration("groupA")).thenReturn(null);
+    gfsh.executeAndAssertThat(command, EXPORT_SHARED_CONFIG + " --group=groupA").statusIsError()
+        .containsOutput("No cluster configuration for 'groupA'.");
+  }
+
+  @Test
+  public void get() {
+    when(ccService.getConfiguration(any())).thenReturn(configuration);
+    configuration.setCacheXmlContent(CLUSTER_XML);
+    Properties properties = new Properties();
+    properties.put("key1", "value1");
+    properties.put("key2", "value2");
+    configuration.setGemfireProperties(properties);
+    configuration.addJarNames(new String[] {"jar1.jar", "jar2.jar"});
+    gfsh.executeAndAssertThat(command, EXPORT_SHARED_CONFIG).statusIsSuccess()
+        .containsOutput("cluster.xml:").containsOutput("Properties:").containsOutput("Jars:")
+        .containsOutput("jar1.jar, jar2.jar").containsOutput("<?xml version=\\\"1.0\\\"")
+        .containsOutput("</cache>");
+  }
+}
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommandTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommandTest.java
new file mode 100644
index 0000000..25c230a
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ImportClusterConfigurationCommandTest.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.cli.commands;
+
+import static org.apache.geode.management.internal.cli.i18n.CliStrings.IMPORT_SHARED_CONFIG;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Collections;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import org.apache.geode.distributed.DistributedMember;
+import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.configuration.domain.Configuration;
+import org.apache.geode.test.junit.rules.GfshParserRule;
+
+public class ImportClusterConfigurationCommandTest {
+
+  @ClassRule
+  public static GfshParserRule gfsh = new GfshParserRule();
+
+  @Rule
+  public TemporaryFolder tempFolder = new TemporaryFolder();
+
+  private ImportClusterConfigurationCommand command;
+  private File xmlFile;
+  private InternalConfigurationPersistenceService ccService;
+  private String commandWithFile;
+  private Configuration configuration;
+
+  @Before
+  public void setUp() throws Exception {
+    command = spy(ImportClusterConfigurationCommand.class);
+    ccService = mock(InternalConfigurationPersistenceService.class);
+    xmlFile = tempFolder.newFile("my.xml");
+    commandWithFile = IMPORT_SHARED_CONFIG + " --xml-file=" + xmlFile.getAbsolutePath() + " ";
+    doReturn(true).when(command).isSharedConfigurationRunning();
+    doReturn(ccService).when(command).getConfigurationPersistenceService();
+    doNothing().when(command).backupTheOldConfig(ccService);
+    doReturn(Collections.emptySet()).when(command).findMembers(any());
+    doReturn(xmlFile).when(command).getUploadedFile();
+    configuration = new Configuration("group");
+  }
+
+  @Test
+  public void autoComplete() {
+    GfshParserRule.CommandCandidate commandCandidate =
+        gfsh.complete(IMPORT_SHARED_CONFIG + " --action=");
+    assertThat(commandCandidate.getCandidates()).hasSize(2);
+    assertThat(commandCandidate.getFirstCandidate())
+        .isEqualTo(IMPORT_SHARED_CONFIG + " --action=APPLY");
+  }
+
+  @Test
+  public void mandatory() {
+    gfsh.executeAndAssertThat(command, IMPORT_SHARED_CONFIG).statusIsError()
+        .containsOutput("Either a zip file or a xml file is required");
+    gfsh.executeAndAssertThat(command, IMPORT_SHARED_CONFIG + " --xml-file=''").statusIsError()
+        .containsOutput("Either a zip file or a xml file is required");
+  }
+
+  @Test
+  public void defaultValue() {
+    GfshParseResult parseResult = gfsh.parse(IMPORT_SHARED_CONFIG + " --xml-file=my.xml");
+    assertThat(parseResult.getParamValue("group")).isEqualTo("cluster");
+    assertThat(parseResult.getParamValue("xml-file")).isEqualTo("my.xml");
+    assertThat(parseResult.getParamValue("action").toString()).isEqualTo("APPLY");
+
+
+    parseResult = gfsh.parse(IMPORT_SHARED_CONFIG + " --group=''");
+    assertThat(parseResult.getParamValue("group")).isEqualTo("cluster");
+  }
+
+  @Test
+  public void preValidation() {
+    gfsh.executeAndAssertThat(command, IMPORT_SHARED_CONFIG + " --xml-file=abc").statusIsError()
+        .containsOutput("Invalid file type");
+
+
+    gfsh.executeAndAssertThat(command, IMPORT_SHARED_CONFIG + " --zip-file-name=b.zip")
+        .statusIsError().containsOutput("'b.zip' not found");
+
+
+    gfsh.executeAndAssertThat(command, IMPORT_SHARED_CONFIG + " --xml-file=a.xml").statusIsError()
+        .containsOutput("'a.xml' not found");
+
+    gfsh.executeAndAssertThat(command, commandWithFile + " --group='group1,group2'").statusIsError()
+        .containsOutput("Only a single group name is supported");
+
+    gfsh.executeAndAssertThat(command,
+        IMPORT_SHARED_CONFIG + " --zip-file-name=b.zip --group=group1").statusIsError()
+        .containsOutput("zip file can not be imported with a specific group");
+
+
+    gfsh.executeAndAssertThat(command, IMPORT_SHARED_CONFIG + " --zip-file-name=b.zip")
+        .statusIsError().containsOutput("'b.zip' not found");
+
+
+    gfsh.executeAndAssertThat(command, IMPORT_SHARED_CONFIG + " --xml-file=a.xml").statusIsError()
+        .containsOutput("'a.xml' not found");
+
+    gfsh.executeAndAssertThat(command, commandWithFile + " --zip-file-name=b.zip").statusIsError()
+        .containsOutput("Zip file and xml File can't both be specified");
+
+    gfsh.executeAndAssertThat(command,
+        IMPORT_SHARED_CONFIG + " --zip-file-name=b.zip --group=group1").statusIsError()
+        .containsOutput("zip file can not be imported with a specific group");
+
+  }
+
+  @Test
+  public void clusterConfigurationNotRunning() {
+    doReturn(false).when(command).isSharedConfigurationRunning();
+    gfsh.executeAndAssertThat(command, commandWithFile).statusIsError()
+        .containsOutput("Cluster configuration service is not running");
+  }
+
+  @Test
+  public void noMemberFound() throws IOException {
+    String xmlContent = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><cache/>";
+    FileUtils.write(xmlFile, xmlContent, Charset.defaultCharset());
+    when(ccService.getConfiguration(any())).thenReturn(configuration);
+
+    gfsh.executeAndAssertThat(command, commandWithFile).statusIsSuccess()
+        .containsOutput("Successfully set the 'cluster' configuration to the content of");
+    assertThat(configuration.getCacheXmlContent()).isEqualTo(xmlContent);
+  }
+
+  @Test
+  public void invalidXml() throws IOException {
+    String xmlContent =
+        "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><cache><region></cache>";
+    FileUtils.write(xmlFile, xmlContent, Charset.defaultCharset());
+    when(ccService.getConfiguration(any())).thenReturn(configuration);
+
+    gfsh.executeAndAssertThat(command, commandWithFile).statusIsError()
+        .containsOutput("Error while processing command ");
+  }
+
+  @Test
+  public void existingMembersWithoutStaging() {
+    doReturn(Collections.singleton(mock(DistributedMember.class))).when(command).findMembers(any());
+    when(ccService.hasXmlConfiguration()).thenReturn(true);
+    gfsh.executeAndAssertThat(command, commandWithFile).statusIsError()
+        .containsOutput("Can not configure servers that are already configured");
+  }
+
+  @Test
+  public void existingMembersWithBounce() {
+    doReturn(Collections.singleton(mock(DistributedMember.class))).when(command).findMembers(any());
+    when(ccService.hasXmlConfiguration()).thenReturn(true);
+    gfsh.executeAndAssertThat(command, commandWithFile + "--action=APPLY").statusIsError()
+        .containsOutput("Can not configure servers that are already configured");
+  }
+
+  @Test
+  public void existingMembersWithIgnore() {
+    doReturn(Collections.singleton(mock(DistributedMember.class))).when(command).findMembers(any());
+    when(ccService.getConfiguration(any())).thenReturn(configuration);
+
+    gfsh.executeAndAssertThat(command, commandWithFile + "--action=STAGE").statusIsSuccess()
+        .containsOutput("Successfully set the 'cluster' configuration to the content of")
+        .containsOutput("Existing servers are not affected with this configuration change");
+  }
+}
diff --git a/geode-dunit/src/main/java/org/apache/geode/management/internal/cli/commands/QueryCommandIntegrationTestBase.java b/geode-dunit/src/main/java/org/apache/geode/management/internal/cli/commands/QueryCommandIntegrationTestBase.java
index 78d3bc3..9ca0c44 100644
--- a/geode-dunit/src/main/java/org/apache/geode/management/internal/cli/commands/QueryCommandIntegrationTestBase.java
+++ b/geode-dunit/src/main/java/org/apache/geode/management/internal/cli/commands/QueryCommandIntegrationTestBase.java
@@ -158,9 +158,9 @@ public class QueryCommandIntegrationTestBase {
 
     List<String> lines = Files.readLines(outputFile, StandardCharsets.UTF_8);
 
-    assertThat(lines.get(4)).isEqualTo("Result");
-    assertThat(lines.get(5)).isEqualTo("--------");
-    lines.subList(6, lines.size()).forEach(line -> assertThat(line).matches("value\\d+"));
+    assertThat(lines.get(7)).isEqualTo("Result");
+    assertThat(lines.get(8)).isEqualTo("--------");
+    lines.subList(9, lines.size()).forEach(line -> assertThat(line).matches("value\\d+"));
   }
 
   @Test
@@ -176,8 +176,8 @@ public class QueryCommandIntegrationTestBase {
     assertThat(outputFile).exists();
     List<String> lines = Files.readLines(outputFile, StandardCharsets.UTF_8);
 
-    assertThat(lines.get(4)).containsPattern("name\\s+\\|\\s+address");
-    lines.subList(6, lines.size())
+    assertThat(lines.get(7)).containsPattern("name\\s+\\|\\s+address");
+    lines.subList(9, lines.size())
         .forEach(line -> assertThat(line).matches("name\\d+.*\"city\":\"Hometown\".*"));
   }
 
diff --git a/geode-dunit/src/main/java/org/apache/geode/management/internal/configuration/ClusterConfig.java b/geode-dunit/src/main/java/org/apache/geode/management/internal/configuration/ClusterConfig.java
index 4488094..4dd5849 100644
--- a/geode-dunit/src/main/java/org/apache/geode/management/internal/configuration/ClusterConfig.java
+++ b/geode-dunit/src/main/java/org/apache/geode/management/internal/configuration/ClusterConfig.java
@@ -129,7 +129,7 @@ public class ClusterConfig implements Serializable {
       Set<String> actualFiles =
           toSetIgnoringHiddenFiles(new File(clusterConfigDir, configGroup.name).list());
 
-      Set<String> expectedFiles = configGroup.getAllFiles();
+      Set<String> expectedFiles = configGroup.getAllJarFiles();
       assertThat(actualFiles).isEqualTo(expectedFiles);
     }
   }
diff --git a/geode-dunit/src/main/java/org/apache/geode/management/internal/security/GfshCommandsSecurityTestBase.java b/geode-dunit/src/main/java/org/apache/geode/management/internal/security/GfshCommandsSecurityTestBase.java
index cc112ad..17c38fa 100644
--- a/geode-dunit/src/main/java/org/apache/geode/management/internal/security/GfshCommandsSecurityTestBase.java
+++ b/geode-dunit/src/main/java/org/apache/geode/management/internal/security/GfshCommandsSecurityTestBase.java
@@ -30,6 +30,7 @@ import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.management.cli.Result;
 import org.apache.geode.management.internal.cli.result.CommandResult;
 import org.apache.geode.management.internal.cli.result.ErrorResultData;
+import org.apache.geode.management.internal.cli.result.ModelCommandResult;
 import org.apache.geode.management.internal.cli.result.ResultBuilder;
 import org.apache.geode.security.SimpleTestSecurityManager;
 import org.apache.geode.test.junit.categories.SecurityTest;
@@ -147,6 +148,10 @@ public class GfshCommandsSecurityTestBase {
     for (TestCommand other : others) {
       System.out.println("Processing unauthorized command: " + other.getCommand());
       CommandResult result = gfshConnection.executeCommand(other.getCommand());
+      if (result instanceof ModelCommandResult) {
+        // ModelCommandResult doesn't send the error code anymore
+        break;
+      }
       int errorCode = ((ErrorResultData) result.getResultData()).getErrorCode();
 
       // for some commands there are pre execution checks to check for user input error, will skip
diff --git a/geode-junit/src/main/java/org/apache/geode/management/internal/configuration/ConfigGroup.java b/geode-junit/src/main/java/org/apache/geode/management/internal/configuration/ConfigGroup.java
index 8afd230..5fd1448 100644
--- a/geode-junit/src/main/java/org/apache/geode/management/internal/configuration/ConfigGroup.java
+++ b/geode-junit/src/main/java/org/apache/geode/management/internal/configuration/ConfigGroup.java
@@ -93,15 +93,15 @@ public class ConfigGroup implements Serializable {
     return Collections.unmodifiableSet(this.jars);
   }
 
-  public Set<String> getConfigFiles() {
-    return Collections.unmodifiableSet(this.configFiles);
-  }
-
   public Set<String> getAllFiles() {
     return Collections.unmodifiableSet(
         Stream.concat(this.jars.stream(), this.configFiles.stream()).collect(Collectors.toSet()));
   }
 
+  public Set<String> getAllJarFiles() {
+    return this.jars.stream().collect(Collectors.toSet());
+  }
+
   public Set<String> getRegions() {
     return Collections.unmodifiableSet(this.regions);
   }