Posted to commits@geode.apache.org by ji...@apache.org on 2017/03/04 00:04:12 UTC

[2/7] geode git commit: GEODE-2416: Collect together artifacts from individual servers into a single zip file

GEODE-2416: Collect together artifacts from individual servers into a single zip file

 - GEODE-2414: Determine a mechanism to stream a zip file from server to locator
 - GEODE-2415: Write a function to return a zip file for a single server
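
The mechanism, in brief: the locator creates an internal DataPolicy.EMPTY region with a
CacheWriter attached, executes ExportLogsFunction on each target member, and each member
streams its filtered, zipped logs back as a sequence of byte[] puts that the writer appends
to a local file. The per-member zips are then unpacked and re-zipped into a single bundle in
the locator's working directory. A hedged usage sketch (option names inferred from the
CliOption keys in the diff below; the output path is illustrative):

    gfsh> export logs --log-level=info --start-time=2017/02/10 --end-time=2017/02/12
    File exported to: /locator/working/dir/exportedLogs[1488585829000].zip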


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/4c6f3695
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/4c6f3695
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/4c6f3695

Branch: refs/heads/develop
Commit: 4c6f3695db9ca69f5c1952128eda91ea5dfe3ede
Parents: e5121ab
Author: Jared Stewart <js...@pivotal.io>
Authored: Sun Feb 12 11:30:58 2017 -0800
Committer: Jinmei Liao <ji...@pivotal.io>
Committed: Fri Mar 3 16:03:49 2017 -0800

----------------------------------------------------------------------
 .../geode/management/internal/cli/CliUtil.java  |  29 +-
 .../cli/commands/MiscellaneousCommands.java     | 368 ++++---------------
 .../cli/functions/ExportLogsFunction.java       | 203 ++++++++++
 .../cli/util/ExportLogsCacheWriter.java         |  78 ++++
 .../internal/cli/util/ExportLogsRepository.java |  39 ++
 .../internal/cli/util/LogExporter.java          | 136 +++++++
 .../management/internal/cli/util/LogFilter.java | 113 ++++++
 .../internal/cli/util/LogLevelExtractor.java    |  65 ++++
 .../management/internal/cli/util/MergeLogs.java |  89 ++++-
 .../internal/configuration/utils/ZipUtils.java  |   8 +-
 .../internal/cli/commands/ExportLogsDUnit.java  | 363 ++++++++++++++++++
 ...laneousCommandsExportLogsPart1DUnitTest.java | 138 -------
 ...laneousCommandsExportLogsPart2DUnitTest.java | 141 -------
 ...laneousCommandsExportLogsPart3DUnitTest.java | 156 --------
 ...laneousCommandsExportLogsPart4DUnitTest.java | 138 -------
 .../ExportLogsFunctionIntegrationTest.java      | 143 +++++++
 .../internal/cli/util/LogExporterTest.java      | 104 ++++++
 .../internal/cli/util/LogFilterTest.java        | 126 +++++++
 .../cli/util/LogLevelExtractorTest.java         |  61 +++
 .../internal/cli/util/MergeLogsTest.java        | 114 ++++++
 .../configuration/EventTestCacheWriter.java     |  36 ++
 .../configuration/ZipUtilsJUnitTest.java        |  16 +
 .../dunit/rules/GfshShellConnectionRule.java    |   4 +
 .../test/dunit/rules/LocatorStarterRule.java    |   1 +
 .../apache/geode/test/dunit/rules/Member.java   |   2 +-
 .../cli/commands/CommandOverHttpDUnitTest.java  |   6 +-
 26 files changed, 1772 insertions(+), 905 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/CliUtil.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/CliUtil.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/CliUtil.java
index 8cd098d..87a07a6 100755
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/CliUtil.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/CliUtil.java
@@ -301,7 +301,27 @@ public class CliUtil {
     return matchingMembers;
   }
 
+  /**
+   * Finds all members (including both servers and locators) that belong to the given arrays of
+   * groups or members.
+   */
+  public static Set<DistributedMember> findMembersIncludingLocators(String[] groups,
+      String[] members) {
+    Cache cache = CacheFactory.getAnyInstance();
+    Set<DistributedMember> allMembers = getAllMembers(cache);
+
+    return findMembers(allMembers, groups, members);
+  }
+
+  /**
+   * Finds all servers that belong to the given arrays of groups or members. Does not include
+   * locators.
+   */
   public static Set<DistributedMember> findMembers(String[] groups, String[] members) {
+    Cache cache = CacheFactory.getAnyInstance();
+    Set<DistributedMember> allNormalMembers = getAllNormalMembers(cache);
+
+    return findMembers(allNormalMembers, groups, members);
+  }
+
+  private static Set<DistributedMember> findMembers(Set<DistributedMember> membersToConsider,
+      String[] groups, String[] members) {
     if (groups == null) {
       groups = new String[] {};
     }
@@ -310,21 +330,18 @@ public class CliUtil {
       members = new String[] {};
     }
 
-    Cache cache = CacheFactory.getAnyInstance();
-
     if ((members.length > 0) && (groups.length > 0)) {
       throw new IllegalArgumentException(CliStrings.PROVIDE_EITHER_MEMBER_OR_GROUP_MESSAGE);
     }
 
-    Set<DistributedMember> allNormalMembers = getAllNormalMembers(cache);
     if (members.length == 0 && groups.length == 0) {
-      return allNormalMembers;
+      return membersToConsider;
     }
 
     Set<DistributedMember> matchingMembers = new HashSet<DistributedMember>();
     // it will either go into this loop or the following loop, not both.
     for (String memberNameOrId : members) {
-      for (DistributedMember member : allNormalMembers) {
+      for (DistributedMember member : membersToConsider) {
         if (memberNameOrId.equalsIgnoreCase(member.getId())
             || memberNameOrId.equals(member.getName())) {
           matchingMembers.add(member);
@@ -333,7 +350,7 @@ public class CliUtil {
     }
 
     for (String group : groups) {
-      for (DistributedMember member : allNormalMembers) {
+      for (DistributedMember member : membersToConsider) {
         if (member.getGroups().contains(group)) {
           matchingMembers.add(member);
         }

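The split gives callers an explicit choice of scope; export logs now uses the
locator-inclusive variant. A condensed usage sketch:

    // Servers only (the pre-existing behavior of findMembers):
    Set<DistributedMember> servers = CliUtil.findMembers(groups, members);

    // Servers and locators, as export logs requires:
    Set<DistributedMember> all = CliUtil.findMembersIncludingLocators(groups, members);
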
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
index bbf0542..e720d09 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
@@ -24,10 +24,10 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.PrintWriter;
-import java.sql.Time;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.text.MessageFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -38,7 +38,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.StringTokenizer;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
@@ -50,9 +49,11 @@ import java.util.zip.DataFormatException;
 import java.util.zip.GZIPInputStream;
 import javax.management.ObjectName;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.LogWriter;
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.cache.Region;
 import org.apache.geode.cache.execute.Execution;
 import org.apache.geode.cache.execute.Function;
 import org.apache.geode.cache.execute.FunctionException;
@@ -65,8 +66,7 @@ import org.apache.geode.distributed.internal.deadlock.Dependency;
 import org.apache.geode.distributed.internal.deadlock.DependencyGraph;
 import org.apache.geode.distributed.internal.deadlock.GemFireDeadlockDetector;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
-import org.apache.geode.internal.i18n.LocalizedStrings;
-import org.apache.geode.internal.logging.InternalLogWriter;
+import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.internal.logging.LogWriterImpl;
 import org.apache.geode.management.CacheServerMXBean;
 import org.apache.geode.management.DistributedRegionMXBean;
@@ -89,9 +89,9 @@ import org.apache.geode.management.internal.cli.GfshParser;
 import org.apache.geode.management.internal.cli.LogWrapper;
 import org.apache.geode.management.internal.cli.domain.StackTracesPerMember;
 import org.apache.geode.management.internal.cli.functions.ChangeLogLevelFunction;
+import org.apache.geode.management.internal.cli.functions.ExportLogsFunction;
 import org.apache.geode.management.internal.cli.functions.GarbageCollectionFunction;
 import org.apache.geode.management.internal.cli.functions.GetStackTracesFunction;
-import org.apache.geode.management.internal.cli.functions.LogFileFunction;
 import org.apache.geode.management.internal.cli.functions.NetstatFunction;
 import org.apache.geode.management.internal.cli.functions.NetstatFunction.NetstatFunctionArgument;
 import org.apache.geode.management.internal.cli.functions.NetstatFunction.NetstatFunctionResult;
@@ -109,17 +109,19 @@ import org.apache.geode.management.internal.cli.result.ResultDataException;
 import org.apache.geode.management.internal.cli.result.TabularResultData;
 import org.apache.geode.management.internal.cli.shell.Gfsh;
 import org.apache.geode.management.internal.cli.util.MergeLogs;
+import org.apache.geode.management.internal.cli.util.ExportLogsCacheWriter;
+import org.apache.geode.management.internal.configuration.utils.ZipUtils;
 import org.apache.geode.management.internal.security.ResourceOperation;
 import org.apache.geode.security.ResourcePermission.Operation;
 import org.apache.geode.security.ResourcePermission.Resource;
 
+import org.apache.logging.log4j.Logger;
 import org.springframework.shell.core.CommandMarker;
 import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
 
 /**
- *
  * @since GemFire 7.0
  */
 public class MiscellaneousCommands implements CommandMarker {
@@ -145,8 +147,6 @@ public class MiscellaneousCommands implements CommandMarker {
 
       logger.info("Gfsh executing shutdown on members " + includeMembers);
 
-
-
       Callable<String> shutdownNodes = new Callable<String>() {
 
         @Override
@@ -210,8 +210,6 @@ public class MiscellaneousCommands implements CommandMarker {
 
       locators.removeAll(dataNodes);
 
-
-
       if (!shutdownLocators && numDataNodes == 0) {
         return ResultBuilder.createInfoResult(CliStrings.SHUTDOWN__MSG__NO_DATA_NODE_FOUND);
       }
@@ -248,8 +246,8 @@ public class MiscellaneousCommands implements CommandMarker {
       }
 
       if (locators.contains(manager) && !shutdownLocators) { // This means manager is a locator and
-                                                             // shutdownLocators is false. Hence we
-                                                             // should not stop the manager
+        // shutdownLocators is false. Hence we
+        // should not stop the manager
         return ResultBuilder.createInfoResult("Shutdown is triggered");
       }
       // now shut down this manager
@@ -272,12 +270,9 @@ public class MiscellaneousCommands implements CommandMarker {
   }
 
   /**
-   * 
    * @param timeout user specified timeout
    * @param nodesToBeStopped list of nodes to be stopped
    * @return Elapsed time to shutdown the given nodes;
-   * @throws ExecutionException
-   * @throws InterruptedException
    */
   private long shutDownNodeWithTimeOut(long timeout, Set<DistributedMember> nodesToBeStopped)
       throws TimeoutException, InterruptedException, ExecutionException {
@@ -290,9 +285,9 @@ public class MiscellaneousCommands implements CommandMarker {
     long timeElapsed = shutDownTimeEnd - shutDownTimeStart;
 
     if (timeElapsed > timeout || Boolean.getBoolean("ThrowTimeoutException")) { // The second check
-                                                                                // for
-                                                                                // ThrowTimeoutException
-                                                                                // is a test hook
+      // for
+      // ThrowTimeoutException
+      // is a test hook
       throw new TimeoutException();
     }
     return timeElapsed;
@@ -532,7 +527,7 @@ public class MiscellaneousCommands implements CommandMarker {
         }
         resultData.addAsFile(saveToFile, resultInfo.toString(),
             CliStrings.NETSTAT__MSG__SAVED_OUTPUT_IN_0, false); // Note: substitution for {0} will
-                                                                // happen on client side.
+        // happen on client side.
       } else {
         resultData.addLine(resultInfo.toString());
       }
@@ -694,87 +689,6 @@ public class MiscellaneousCommands implements CommandMarker {
     return result;
   }
 
-  Result exportLogsPreprocessing(String dirName, String[] groups, String memberId, String logLevel,
-      boolean onlyLogLevel, boolean mergeLog, String start, String end,
-      int numOfLogFilesForTesting) {
-    Result result = null;
-    try {
-      LogWrapper.getInstance().fine("Exporting logs");
-      Cache cache = CacheFactory.getAnyInstance();
-      Set<DistributedMember> dsMembers = new HashSet<DistributedMember>();
-      Time startTime = null, endTime = null;
-
-      if (logLevel == null || logLevel.length() == 0) {
-        // set default log level
-        logLevel = LogWriterImpl.levelToString(InternalLogWriter.INFO_LEVEL);
-      }
-      if (start != null && end == null) {
-        startTime = parseDate(start);
-        endTime = new Time(System.currentTimeMillis());
-      }
-
-      if (end != null && start == null) {
-        endTime = parseDate(end);
-        startTime = new Time(0);
-      }
-      if (start != null && end != null) {
-        startTime = parseDate(start);
-        endTime = parseDate(end);
-        if (endTime.getTime() - startTime.getTime() <= 0) {
-          result =
-              ResultBuilder.createUserErrorResult(CliStrings.EXPORT_LOGS__MSG__INVALID_TIMERANGE);
-        }
-      }
-      if (end == null && start == null) {
-        // set default time range as 1 day.
-        endTime = new Time(System.currentTimeMillis());
-        startTime = new Time(endTime.getTime() - 24 * 60 * 60 * 1000);
-      }
-      LogWrapper.getInstance().fine(
-          "Exporting logs startTime=" + startTime.toGMTString() + " " + startTime.toLocaleString());
-      LogWrapper.getInstance()
-          .fine("Exporting logs endTime=" + endTime.toGMTString() + " " + endTime.toLocaleString());
-      if (groups != null && memberId != null) {
-        result =
-            ResultBuilder.createUserErrorResult(CliStrings.EXPORT_LOGS__MSG__SPECIFY_ONE_OF_OPTION);
-      } else if (groups != null && groups.length > 0) {
-        for (String group : groups) {
-          Set<DistributedMember> groupMembers = cache.getDistributedSystem().getGroupMembers(group);
-          if (groupMembers != null && groupMembers.size() > 0) {
-            dsMembers.addAll(groupMembers);
-          }
-        }
-        if (dsMembers.size() == 0) {
-          result = ResultBuilder
-              .createUserErrorResult(CliStrings.EXPORT_LOGS__MSG__NO_GROUPMEMBER_FOUND);
-        }
-        result = export(cache, dsMembers, dirName, logLevel, onlyLogLevel ? "true" : "false",
-            mergeLog, startTime, endTime, numOfLogFilesForTesting);
-      } else if (memberId != null) {
-        DistributedMember member = CliUtil.getDistributedMemberByNameOrId(memberId);
-        if (member == null) {
-          result = ResultBuilder.createUserErrorResult(
-              CliStrings.format(CliStrings.EXPORT_LOGS__MSG__INVALID_MEMBERID, memberId));
-        }
-        dsMembers.add(member);
-        result = export(cache, dsMembers, dirName, logLevel, onlyLogLevel ? "true" : "false",
-            mergeLog, startTime, endTime, numOfLogFilesForTesting);
-      } else {
-        // run in entire DS members and get all including locators
-        dsMembers.addAll(CliUtil.getAllMembers(cache));
-        result = export(cache, dsMembers, dirName, logLevel, onlyLogLevel ? "true" : "false",
-            mergeLog, startTime, endTime, numOfLogFilesForTesting);
-      }
-    } catch (ParseException ex) {
-      LogWrapper.getInstance().fine(ex.getMessage());
-      result = ResultBuilder.createUserErrorResult(ex.getMessage());
-    } catch (Exception ex) {
-      LogWrapper.getInstance().fine(ex.getMessage());
-      result = ResultBuilder.createUserErrorResult(ex.getMessage());
-    }
-    return result;
-  }
-
   @CliCommand(value = CliStrings.EXPORT_LOGS, help = CliStrings.EXPORT_LOGS__HELP)
   @CliMetaData(shellOnly = false,
       relatedTopic = {CliStrings.TOPIC_GEODE_SERVER, CliStrings.TOPIC_GEODE_DEBUG_UTIL})
@@ -789,7 +703,7 @@ public class MiscellaneousCommands implements CommandMarker {
       @CliOption(key = CliStrings.EXPORT_LOGS__MEMBER,
           unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
           optionContext = ConverterHint.ALL_MEMBER_IDNAME,
-          help = CliStrings.EXPORT_LOGS__MEMBER__HELP) String memberId,
+          help = CliStrings.EXPORT_LOGS__MEMBER__HELP) String[] memberIds,
       @CliOption(key = CliStrings.EXPORT_LOGS__LOGLEVEL,
           unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
           optionContext = ConverterHint.LOG_LEVEL,
@@ -805,207 +719,63 @@ public class MiscellaneousCommands implements CommandMarker {
           unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
           help = CliStrings.EXPORT_LOGS__ENDTIME__HELP) String end) {
     Result result = null;
-    try {
-      result = exportLogsPreprocessing(dirName, groups, memberId, logLevel, onlyLogLevel, mergeLog,
-          start, end, 0);
-    } catch (Exception ex) {
-      LogWrapper.getInstance().fine(ex.getMessage());
-      result = ResultBuilder.createUserErrorResult(ex.getMessage());
-    }
-    LogWrapper.getInstance().fine("Exporting logs returning =" + result);
-    return result;
-  }
+    Logger logger = LogService.getLogger();
 
-  Time parseDate(String dateString) throws ParseException {
-    Time time = null;
     try {
-      SimpleDateFormat df = new SimpleDateFormat(MiscellaneousCommands.FORMAT);
-      time = new Time(df.parse(dateString).getTime());
-    } catch (Exception e) {
-      SimpleDateFormat df = new SimpleDateFormat(MiscellaneousCommands.ONLY_DATE_FORMAT);
-      time = new Time(df.parse(dateString).getTime());
-    }
-    return time;
-  }
+      GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
 
-  Result export(Cache cache, Set<DistributedMember> dsMembers, String dirName, String logLevel,
-      String onlyLogLevel, boolean mergeLog, Time startTime, Time endTime,
-      int numOfLogFilesForTesting) {
-    LogWrapper.getInstance()
-        .fine("Exporting logs in export membersize = " + dsMembers.size() + " dirname=" + dirName
-            + " logLevel=" + logLevel + " onlyLogLevel=" + onlyLogLevel + " mergeLog=" + mergeLog
-            + " startTime=" + startTime.toGMTString() + "endTime=" + endTime.toGMTString());
-    Function function = new LogFileFunction();
-    FunctionService.registerFunction(function);
-    try {
-      List<?> resultList = null;
+      Set<DistributedMember> targetMembers =
+          CliUtil.findMembersIncludingLocators(groups, memberIds);
 
-      List<String> logsToMerge = new ArrayList<String>();
-
-      Iterator<DistributedMember> it = dsMembers.iterator();
-      Object[] args = new Object[6];
-      args[0] = dirName;
-      args[1] = logLevel;
-      args[2] = onlyLogLevel;
-      args[3] = startTime.getTime();
-      args[4] = endTime.getTime();
-      args[5] = numOfLogFilesForTesting;
-
-      while (it.hasNext()) {
-        boolean toContinueForRestOfmembers = false;
-        DistributedMember member = it.next();
-        LogWrapper.getInstance().fine("Exporting logs copy the logs for member=" + member.getId());
-        try {
-          resultList = (ArrayList<?>) CliUtil.executeFunction(function, args, member).getResult();
-        } catch (Exception ex) {
-          LogWrapper.getInstance()
-              .fine(CliStrings.format(
-                  CliStrings.EXPORT_LOGS__MSG__FAILED_TO_EXPORT_LOG_FILES_FOR_MEMBER_0,
-                  member.getId()), ex);
-          // try for other members
-          continue;
-        }
 
-        if (resultList != null && !resultList.isEmpty()) {
-          for (int i = 0; i < resultList.size(); i++) {
-            Object object = resultList.get(i);
-            if (object instanceof Exception) {
-              ResultBuilder.createGemFireErrorResult(CliStrings.format(
-                  CliStrings.EXPORT_LOGS__MSG__FAILED_TO_EXPORT_LOG_FILES_FOR_MEMBER_0,
-                  member.getId()));
-              LogWrapper.getInstance().fine("Exporting logs for member=" + member.getId()
-                  + " exception=" + ((Throwable) object).getMessage(), ((Throwable) object));
-              toContinueForRestOfmembers = true;
-              break;
-            } else if (object instanceof Throwable) {
-              ResultBuilder.createGemFireErrorResult(CliStrings.format(
-                  CliStrings.EXPORT_LOGS__MSG__FAILED_TO_EXPORT_LOG_FILES_FOR_MEMBER_0,
-                  member.getId()));
-
-              Throwable th = (Throwable) object;
-              LogWrapper.getInstance().fine(CliUtil.stackTraceAsString((th)));
-              LogWrapper.getInstance().fine("Exporting logs for member=" + member.getId()
-                  + " exception=" + ((Throwable) object).getMessage(), ((Throwable) object));
-              toContinueForRestOfmembers = true;
-              break;
-            }
-          }
-        } else {
-          LogWrapper.getInstance().fine("Exporting logs for member=" + member.getId()
-              + " resultList is either null or empty");
-          continue;
-        }
+      Map<String, Path> zipFilesFromMembers = new HashMap<>();
+      for (DistributedMember server : targetMembers) {
+        Region region = ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
 
-        if (toContinueForRestOfmembers == true) {
-          LogWrapper.getInstance().fine("Exporting logs for member=" + member.getId()
-              + " toContinueForRestOfmembers=" + toContinueForRestOfmembers);
-          // proceed for rest of the member
-          continue;
-        }
+        ExportLogsCacheWriter cacheWriter =
+            (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();
 
-        String rstList = (String) resultList.get(0);
-        LogWrapper.getInstance().fine("for member=" + member.getId()
-            + "Successfully exported to directory=" + dirName + " rstList=" + rstList);
-        if (rstList == null || rstList.length() == 0) {
-          ResultBuilder.createGemFireErrorResult(CliStrings.format(
-              CliStrings.EXPORT_LOGS__MSG__FAILED_TO_EXPORT_LOG_FILES_FOR_MEMBER_0,
-              member.getId()));
-          LogWrapper.getInstance().fine("for member=" + member.getId() + "rstList is null");
-          continue;
-        } else if (rstList.contains("does not exist or cannot be created")) {
-          LogWrapper.getInstance()
-              .fine("for member=" + member.getId() + " does not exist or cannot be created");
-          return ResultBuilder.createInfoResult(CliStrings
-              .format(CliStrings.EXPORT_LOGS__MSG__TARGET_DIR_CANNOT_BE_CREATED, dirName));
-        } else if (rstList.contains(
-            LocalizedStrings.InternalDistributedSystem_THIS_CONNECTION_TO_A_DISTRIBUTED_SYSTEM_HAS_BEEN_DISCONNECTED
-                .toLocalizedString())) {
-          LogWrapper.getInstance()
-              .fine("for member=" + member.getId()
-                  + LocalizedStrings.InternalDistributedSystem_THIS_CONNECTION_TO_A_DISTRIBUTED_SYSTEM_HAS_BEEN_DISCONNECTED
-                      .toLocalizedString());
-          // proceed for rest of the members
-          continue;
-        }
+        cacheWriter.startFile(server.getName());
 
-        // maintain list of log files to merge only when merge option is true.
-        if (mergeLog == true) {
-          StringTokenizer st = new StringTokenizer(rstList, ";");
-          while (st.hasMoreTokens()) {
-            logsToMerge.add(st.nextToken());
-          }
-        }
+        CliUtil.executeFunction(new ExportLogsFunction(),
+                new ExportLogsFunction.Args(start, end, logLevel, onlyLogLevel), server)
+            .getResult();
+        Path zipFile = cacheWriter.endFile();
+        ExportLogsFunction.destroyExportLogsRegion();
+        logger.info("Recieved zip file from member " + server.getId() + ": " + zipFile.toString());
+        zipFilesFromMembers.put(server.getId(), zipFile);
       }
-      // merge log files
-      if (mergeLog == true) {
-        LogWrapper.getInstance().fine("Successfully exported to directory=" + dirName
-            + " and now merging logsToMerge=" + logsToMerge.size());
-        mergeLogs(logsToMerge);
-        return ResultBuilder
-            .createInfoResult("Successfully exported and merged in directory " + dirName);
+
+      Path tempDir = Files.createTempDirectory("exportedLogs");
+      Path exportedLogsDir = tempDir.resolve("exportedLogs");
+
+      for (Path zipFile : zipFilesFromMembers.values()) {
+        Path unzippedMemberDir =
+            exportedLogsDir.resolve(zipFile.getFileName().toString().replace(".zip", ""));
+        ZipUtils.unzip(zipFile.toAbsolutePath().toString(), unzippedMemberDir.toString());
+        FileUtils.deleteQuietly(zipFile.toFile());
       }
-      LogWrapper.getInstance().fine("Successfully exported to directory without merging" + dirName);
-      return ResultBuilder.createInfoResult("Successfully exported to directory " + dirName);
+
+      Path workingDir = Paths.get(System.getProperty("user.dir"));
+      Path exportedLogsZipFile = workingDir
+          .resolve("exportedLogs[" + System.currentTimeMillis() + "].zip").toAbsolutePath();
+
+      logger.info("Zipping into: " + exportedLogsZipFile.toString());
+      ZipUtils.zipDirectory(exportedLogsDir, exportedLogsZipFile);
+      FileUtils.deleteDirectory(tempDir.toFile());
+      result = ResultBuilder.createInfoResult("File exported to: " + exportedLogsZipFile.toString());
     } catch (Exception ex) {
-      LogWrapper.getInstance().info(ex.getMessage(), ex);
-      return ResultBuilder.createUserErrorResult(
-          CliStrings.EXPORT_LOGS__MSG__FUNCTION_EXCEPTION + ((LogFileFunction) function).getId());
+      logger.error(ex.getMessage(), ex);
+      result = ResultBuilder.createUserErrorResult(ex.getMessage());
+    } finally {
+      ExportLogsFunction.destroyExportLogsRegion();
     }
-  }
 
-  Result mergeLogs(List<String> logsToMerge) {
-    // create a new process for merging
-    LogWrapper.getInstance().fine("Exporting logs merging logs" + logsToMerge.size());
-    if (logsToMerge.size() > 1) {
-      List<String> commandList = new ArrayList<String>();
-      commandList.add(System.getProperty("java.home") + File.separatorChar + "bin"
-          + File.separatorChar + "java");
-      commandList.add("-classpath");
-      commandList.add(System.getProperty("java.class.path", "."));
-      commandList.add(MergeLogs.class.getName());
-
-      commandList
-          .add(logsToMerge.get(0).substring(0, logsToMerge.get(0).lastIndexOf(File.separator) + 1));
-
-      ProcessBuilder procBuilder = new ProcessBuilder(commandList);
-      StringBuilder output = new StringBuilder();
-      String errorString = new String();
-      try {
-        LogWrapper.getInstance().fine("Exporting logs now merging logs");
-        Process mergeProcess = procBuilder.redirectErrorStream(true).start();
-
-        mergeProcess.waitFor();
-
-        InputStream inputStream = mergeProcess.getInputStream();
-        BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
-        String line = null;
-
-        while ((line = br.readLine()) != null) {
-          output.append(line).append(GfshParser.LINE_SEPARATOR);
-        }
-        mergeProcess.destroy();
-      } catch (Exception e) {
-        LogWrapper.getInstance().fine(e.getMessage());
-        return ResultBuilder.createUserErrorResult(
-            CliStrings.EXPORT_LOGS__MSG__FUNCTION_EXCEPTION + "Could not merge");
-      } finally {
-        if (errorString != null) {
-          output.append(errorString).append(GfshParser.LINE_SEPARATOR);
-          LogWrapper.getInstance().fine("Exporting logs after merging logs " + output);
-        }
-      }
-      if (output.toString().contains("Sucessfully merged logs")) {
-        LogWrapper.getInstance().fine("Exporting logs Sucessfully merged logs");
-        return ResultBuilder.createInfoResult("Successfully merged");
-      } else {
-        LogWrapper.getInstance().fine("Could not merge");
-        return ResultBuilder.createUserErrorResult(
-            CliStrings.EXPORT_LOGS__MSG__FUNCTION_EXCEPTION + "Could not merge");
-      }
-    }
-    return ResultBuilder.createInfoResult("Only one log file, nothing to merge");
+    LogWrapper.getInstance().fine("Exporting logs returning =" + result);
+    return result;
   }
 
+
+
   /****
    * Current implementation supports writing it to a file and returning the location of the file
    *
@@ -1112,7 +882,7 @@ public class MiscellaneousCommands implements CommandMarker {
 
   /***
    * Writes the Stack traces member-wise to a text file
-   * 
+   *
    * @param dumps - Map containing key : member , value : zipped stack traces
    * @param fileName - Name of the file to which the stack-traces are written to
    * @return Canonical path of the file which contains the stack-traces
@@ -1266,7 +1036,6 @@ public class MiscellaneousCommands implements CommandMarker {
         csvBuilder.append('\n');
       }
 
-
       CompositeResultData crd = ResultBuilder.createCompositeResultData();
       SectionResultData section = crd.addSection();
       TabularResultData metricsTable = section.addTable();
@@ -1410,7 +1179,6 @@ public class MiscellaneousCommands implements CommandMarker {
         Set<String> checkSet = new HashSet<String>(categoriesMap.keySet());
         Set<String> userCategories = getSetDifference(categories, checkSet);
 
-
         // Checking if the categories specified by the user are valid or not
         if (userCategories.isEmpty()) {
           for (String category : checkSet) {
@@ -1604,7 +1372,7 @@ public class MiscellaneousCommands implements CommandMarker {
             memberMxBean.getDiskFlushAvgLatency(), csvBuilder);
         writeToTableAndCsv(metricsTable, "", "totalQueueSize",
             memberMxBean.getTotalDiskTasksWaiting(), csvBuilder); // deadcoded to workaround bug
-                                                                  // 46397
+        // 46397
         writeToTableAndCsv(metricsTable, "", "totalBackupInProgress",
             memberMxBean.getTotalBackupInProgress(), csvBuilder);
       }
@@ -2059,7 +1827,7 @@ public class MiscellaneousCommands implements CommandMarker {
 
   /***
    * Writes an entry to a TabularResultData and writes a comma separated entry to a string builder
-   * 
+   *
    * @param metricsTable
    * @param type
    * @param metricName
@@ -2116,7 +1884,7 @@ public class MiscellaneousCommands implements CommandMarker {
 
   /****
    * Defines and returns map of categories for System metrics.
-   * 
+   *
    * @return map with categories for system metrics and display flag set to true
    */
   private Map<String, Boolean> getSystemMetricsCategories() {
@@ -2130,7 +1898,7 @@ public class MiscellaneousCommands implements CommandMarker {
 
   /****
    * Defines and returns map of categories for Region Metrics
-   * 
+   *
    * @return map with categories for region metrics and display flag set to true
    */
   private Map<String, Boolean> getRegionMetricsCategories() {
@@ -2149,7 +1917,7 @@ public class MiscellaneousCommands implements CommandMarker {
 
   /****
    * Defines and returns map of categories for system-wide region metrics
-   * 
+   *
    * @return map with categories for system wide region metrics and display flag set to true
    */
   private Map<String, Boolean> getSystemRegionMetricsCategories() {
@@ -2160,7 +1928,7 @@ public class MiscellaneousCommands implements CommandMarker {
 
   /*****
    * Defines and returns map of categories for member metrics
-   * 
+   *
    * @return map with categories for member metrics and display flag set to true
    */
   private Map<String, Boolean> getMemberMetricsCategories() {
@@ -2214,7 +1982,7 @@ public class MiscellaneousCommands implements CommandMarker {
 
   /***
    * Writes to a TabularResultData and also appends a CSV string to a String builder
-   * 
+   *
    * @param metricsTable
    * @param type
    * @param metricName
@@ -2280,7 +2048,6 @@ public class MiscellaneousCommands implements CommandMarker {
       Set<DistributedMember> dsMembers = new HashSet<DistributedMember>();
       Set<DistributedMember> ds = CliUtil.getAllMembers(cache);
 
-
       if (grps != null && grps.length > 0) {
         for (String grp : grps) {
           dsMembers.addAll(cache.getDistributedSystem().getGroupMembers(grp));
@@ -2359,7 +2126,6 @@ public class MiscellaneousCommands implements CommandMarker {
   }
 
 
-
   @CliAvailabilityIndicator({CliStrings.SHUTDOWN, CliStrings.GC, CliStrings.SHOW_DEADLOCK,
       CliStrings.SHOW_METRICS, CliStrings.SHOW_LOG, CliStrings.EXPORT_STACKTRACE,
       CliStrings.NETSTAT, CliStrings.EXPORT_LOGS, CliStrings.CHANGE_LOGLEVEL})

http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
new file mode 100644
index 0000000..1dc89e9
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
@@ -0,0 +1,203 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.functions;
+
+import static java.util.stream.Collectors.toSet;
+import static org.apache.geode.distributed.internal.DistributionManager.LOCATOR_DM_TYPE;
+
+import org.apache.geode.cache.AttributesFactory;
+import org.apache.geode.cache.DataPolicy;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.Scope;
+import org.apache.geode.cache.execute.Function;
+import org.apache.geode.cache.execute.FunctionContext;
+import org.apache.geode.internal.InternalEntity;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalRegionArguments;
+import org.apache.geode.internal.lang.StringUtils;
+import org.apache.geode.internal.logging.InternalLogWriter;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.internal.logging.LogWriterImpl;
+import org.apache.geode.management.internal.cli.commands.MiscellaneousCommands;
+import org.apache.geode.management.internal.cli.util.ExportLogsCacheWriter;
+import org.apache.geode.management.internal.cli.util.LogExporter;
+import org.apache.geode.management.internal.cli.util.LogFilter;
+import org.apache.geode.management.internal.configuration.domain.Configuration;
+import org.apache.logging.log4j.Logger;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.util.Arrays;
+import java.util.Set;
+import java.util.stream.Stream;
+
+public class ExportLogsFunction implements Function, InternalEntity {
+  public static final String EXPORT_LOGS_REGION = "__exportLogsRegion";
+  private static final Logger LOGGER = LogService.getLogger();
+  private static final long serialVersionUID = 1L;
+  private static final int BUFFER_SIZE = 1024;
+
+
+  @Override
+  public void execute(final FunctionContext context) {
+    try {
+      // TODO: change this to get cache from FunctionContext when it becomes available
+      GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+
+      String memberId = cache.getDistributedSystem().getMemberId();
+      LOGGER.info("ExportLogsFunction started for member {}", memberId);
+
+      Region exportLogsRegion = createOrGetExistingExportLogsRegion(false);
+
+      Args args = (Args) context.getArguments();
+      LogFilter logFilter =
+          new LogFilter(args.getPermittedLogLevels(), args.getStartTime(), args.getEndTime());
+      Path workingDir = Paths.get(System.getProperty("user.dir"));
+
+      Path exportedZipFile = new LogExporter(logFilter).export(workingDir);
+
+      LOGGER.info("Streaming zipped file: " + exportedZipFile.toString());
+      try (FileInputStream inputStream = new FileInputStream(exportedZipFile.toFile())) {
+        byte[] buffer = new byte[BUFFER_SIZE];
+
+        int bytesRead;
+        while ((bytesRead = inputStream.read(buffer)) > 0) {
+          if (bytesRead == BUFFER_SIZE) {
+            exportLogsRegion.put(memberId, buffer);
+          } else {
+            exportLogsRegion.put(memberId, Arrays.copyOfRange(buffer, 0, bytesRead));
+          }
+        }
+      }
+
+      context.getResultSender().lastResult(null);
+
+    } catch (Exception e) {
+      LOGGER.error(e);
+      context.getResultSender().sendException(e);
+    }
+  }
+
+  protected static boolean isLocator(GemFireCacheImpl cache) {
+    return cache.getMyId().getVmKind() == LOCATOR_DM_TYPE;
+  }
+
+  public static Region createOrGetExistingExportLogsRegion(boolean isInitiatingMember)
+      throws IOException, ClassNotFoundException {
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+
+    Region exportLogsRegion = cache.getRegion(EXPORT_LOGS_REGION);
+    if (exportLogsRegion == null) {
+      AttributesFactory<String, Configuration> regionAttrsFactory =
+          new AttributesFactory<String, Configuration>();
+      regionAttrsFactory.setDataPolicy(DataPolicy.EMPTY);
+      regionAttrsFactory.setScope(Scope.DISTRIBUTED_ACK);
+
+      if (isInitiatingMember) {
+        regionAttrsFactory.setCacheWriter(new ExportLogsCacheWriter());
+      }
+      InternalRegionArguments internalArgs = new InternalRegionArguments();
+      internalArgs.setIsUsedForMetaRegion(true);
+      exportLogsRegion =
+          cache.createVMRegion(EXPORT_LOGS_REGION, regionAttrsFactory.create(), internalArgs);
+    }
+
+    return exportLogsRegion;
+  }
+
+  public static void destroyExportLogsRegion() {
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+
+    Region exportLogsRegion = cache.getRegion(EXPORT_LOGS_REGION);
+    if (exportLogsRegion == null) {
+      return;
+    }
+
+    exportLogsRegion.destroyRegion();
+  }
+
+  @Override
+  public boolean isHA() {
+    return false;
+  }
+
+  public static class Args implements Serializable {
+    private String startTime;
+    private String endTime;
+    private String logLevel;
+    private boolean logLevelOnly;
+
+    public Args(String startTime, String endTime, String logLevel, boolean logLevelOnly) {
+      this.startTime = startTime;
+      this.endTime = endTime;
+      this.logLevel = logLevel;
+      this.logLevelOnly = logLevelOnly;
+    }
+
+    public LocalDateTime getStartTime() {
+      return parseTime(startTime);
+    }
+
+    public LocalDateTime getEndTime() {
+      return parseTime(endTime);
+    }
+
+    public Set<String> getPermittedLogLevels() {
+      if (logLevel == null || StringUtils.isBlank(logLevel)) {
+        return LogFilter.allLogLevels();
+      }
+
+      if (logLevelOnly) {
+        return Stream.of(logLevel).collect(toSet());
+      }
+
+      // Return all log levels lower than or equal to the specified logLevel
+      return Arrays.stream(InternalLogWriter.levelNames).filter((String level) -> {
+        int logLevelCode = LogWriterImpl.levelNameToCode(level);
+        int logLevelCodeThreshold = LogWriterImpl.levelNameToCode(logLevel);
+
+        return logLevelCode >= logLevelCodeThreshold;
+      }).collect(toSet());
+    }
+
+    private static LocalDateTime parseTime(String dateString) {
+      if (dateString == null) {
+        return null;
+      }
+
+      try {
+        SimpleDateFormat df = new SimpleDateFormat(MiscellaneousCommands.FORMAT);
+        return df.parse(dateString).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
+      } catch (ParseException e) {
+        try {
+          SimpleDateFormat df = new SimpleDateFormat(MiscellaneousCommands.ONLY_DATE_FORMAT);
+          return df.parse(dateString).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
+        } catch (ParseException e1) {
+          return null;
+        }
+      }
+    }
+  }
+}

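The streaming in execute() reduces to a fixed-size chunking loop: read up to BUFFER_SIZE
bytes, forward exactly what was read, trimming the final short read so no padding reaches
the receiver. A standalone sketch of the same pattern (Chunker and the send consumer are
hypothetical; the real code puts each chunk into the export region under the member's id):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Arrays;
    import java.util.function.Consumer;

    final class Chunker {
      private static final int BUFFER_SIZE = 1024;

      // Read the stream in fixed-size chunks; trim the last partial chunk.
      static void stream(InputStream in, Consumer<byte[]> send) throws IOException {
        byte[] buffer = new byte[BUFFER_SIZE];
        int bytesRead;
        while ((bytesRead = in.read(buffer)) > 0) {
          send.accept(bytesRead == BUFFER_SIZE
              ? buffer.clone() // copy, since a generic consumer may hold the array
              : Arrays.copyOfRange(buffer, 0, bytesRead));
        }
      }
    }
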
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java
new file mode 100644
index 0000000..a8b7225
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.geode.cache.CacheWriterException;
+import org.apache.geode.cache.EntryEvent;
+import org.apache.geode.cache.util.CacheWriterAdapter;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Map;
+
+public class ExportLogsCacheWriter extends CacheWriterAdapter implements Serializable {
+  private Path currentFile;
+  private BufferedOutputStream currentOutputStream;
+
+  @Override
+  public void beforeCreate(EntryEvent event) throws CacheWriterException {
+    if (currentFile.getFileName().endsWith("server-2.zip")) {
+      System.out.println("We got data from server 2");
+    }
+    if (currentOutputStream == null) {
+      throw new IllegalStateException("No outputStream is open.  You must call startFile before sending data.");
+    }
+
+    try {
+      Object newValue = event.getNewValue();
+      if (!(newValue instanceof byte[])) {
+        throw new IllegalArgumentException(
+            "Value must be a byte[].  Recieved " + newValue.getClass().getCanonicalName());
+      }
+      currentOutputStream.write((byte[]) newValue);
+    } catch (IOException e) {
+      throw new CacheWriterException(e);
+    }
+  }
+
+  public void startFile(String memberId) throws IOException {
+    if (currentFile != null || currentOutputStream != null) {
+      throw new IllegalStateException("Cannot open more than one file at once");
+    }
+
+    currentFile = Files.createTempDirectory(memberId).resolve(memberId + ".zip");
+    currentOutputStream = new BufferedOutputStream(new FileOutputStream(currentFile.toFile()));
+  }
+
+  public Path endFile() {
+    Path completedFile = currentFile;
+
+    IOUtils.closeQuietly(currentOutputStream);
+    currentOutputStream = null;
+    currentFile = null;
+
+    return completedFile;
+  }
+}

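The writer can act as a byte sink because of the region configuration above: with
DataPolicy.EMPTY nothing is ever stored locally, so each put from a remote member arrives on
the initiating side as a create event and is handed to beforeCreate. The protocol the command
code follows is strictly startFile / puts / endFile; a condensed sketch (variable names
illustrative):

    ExportLogsCacheWriter writer =
        (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();
    writer.startFile(member.getName());   // open a temp <member>.zip
    // ... ExportLogsFunction runs on the member; each put appends a byte[] chunk ...
    Path zipFile = writer.endFile();      // close the stream and return the finished file
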
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java
new file mode 100644
index 0000000..9a79fc0
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import java.util.Map;
+
+/**
+ * Placeholder for tracking exported log files on the locator; methods are not yet implemented.
+ */
+public class ExportLogsRepository {
+
+  Map exportFiles;
+
+  public void addFile() {
+
+  }
+
+  public void deleteFile() {
+
+  }
+
+  private void cleanUpExpiredFiles() {
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogExporter.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogExporter.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogExporter.java
new file mode 100644
index 0000000..f30d686
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogExporter.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import static java.util.stream.Collectors.toList;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.geode.cache.AttributesFactory;
+import org.apache.geode.cache.DataPolicy;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.Scope;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalRegionArguments;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.management.internal.configuration.domain.Configuration;
+import org.apache.geode.management.internal.configuration.utils.ZipUtils;
+import org.apache.logging.log4j.Logger;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.ParseException;
+import java.util.List;
+import java.util.function.Predicate;
+import java.util.stream.Stream;
+
+public class LogExporter {
+  private static final Logger LOGGER = LogService.getLogger();
+
+  private final LogFilter logFilter;
+
+  public LogExporter(LogFilter logFilter) throws ParseException {
+    this.logFilter = logFilter;
+  }
+
+  public Path export(Path workingDir) throws IOException {
+    LOGGER.debug("Working directory is {}", workingDir);
+
+    Path tempDirectory = Files.createTempDirectory("exportLogs");
+
+    for (Path logFile : findLogFiles(workingDir)) {
+      Path filteredLogFile = tempDirectory.resolve(logFile.getFileName());
+
+      if (this.logFilter == null) {
+        Files.copy(logFile, filteredLogFile);
+      } else {
+        writeFilteredLogFile(logFile, filteredLogFile);
+      }
+    }
+
+    for (Path statFile : findStatFiles(workingDir)) {
+      // resolve against the temp dir; copying onto the directory itself would fail
+      Files.copy(statFile, tempDirectory.resolve(statFile.getFileName()));
+    }
+
+    Path zipFile = Files.createTempFile("logExport", ".zip");
+    ZipUtils.zipDirectory(tempDirectory, zipFile);
+    LOGGER.info("Zipped files to: " + zipFile);
+
+
+    FileUtils.deleteDirectory(tempDirectory.toFile());
+
+    return zipFile;
+  }
+
+  protected void writeFilteredLogFile(Path originalLogFile, Path filteredLogFile)
+      throws IOException {
+    this.logFilter.startNewFile();
+
+    try (BufferedReader reader = new BufferedReader(new FileReader(originalLogFile.toFile()))) {
+      try (BufferedWriter writer = new BufferedWriter(new FileWriter(filteredLogFile.toFile()))) {
+
+        String line;
+        while ((line = reader.readLine()) != null) {
+          LogFilter.LineFilterResult result = this.logFilter.acceptsLine(line);
+
+          if (result == LogFilter.LineFilterResult.REMAINDER_OF_FILE_REJECTED) {
+            break;
+          }
+
+          if (result == LogFilter.LineFilterResult.LINE_ACCEPTED) {
+            writeLine(line, writer);
+          }
+        }
+      }
+    }
+  }
+
+  private void writeLine(String line, BufferedWriter writer) {
+    try {
+      writer.write(line);
+      writer.newLine();
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to write to log file", e);
+    }
+  }
+
+  protected List<Path> findLogFiles(Path workingDir) throws IOException {
+    Predicate<Path> logFileSelector = (Path file) -> file.toString().toLowerCase().endsWith(".log");
+    return findFiles(workingDir, logFileSelector);
+  }
+
+
+  protected List<Path> findStatFiles(Path workingDir) throws IOException {
+    Predicate<Path> statFileSelector =
+        (Path file) -> file.toString().toLowerCase().endsWith(".gfs");
+    return findFiles(workingDir, statFileSelector);
+  }
+
+  private List<Path> findFiles(Path workingDir, Predicate<Path> fileSelector) throws IOException {
+    Stream<Path> selectedFiles =
+        Files.list(workingDir).filter(fileSelector).filter(this.logFilter::acceptsFile);
+
+    return selectedFiles.collect(toList());
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogFilter.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogFilter.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogFilter.java
new file mode 100644
index 0000000..2436530
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogFilter.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import static java.util.stream.Collectors.toSet;
+
+import org.apache.geode.internal.logging.InternalLogWriter;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.logging.log4j.Logger;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.util.Arrays;
+import java.util.Set;
+
+public class LogFilter {
+  public enum LineFilterResult {
+    LINE_ACCEPTED, LINE_REJECTED, REMAINDER_OF_FILE_REJECTED
+  }
+
+  private static final Logger LOGGER = LogService.getLogger();
+
+  private final Set<String> permittedLogLevels;
+  private final LocalDateTime startDate;
+  private final LocalDateTime endDate;
+
+  private LineFilterResult resultOfPreviousLine = LineFilterResult.LINE_ACCEPTED;
+
+  public LogFilter(Set<String> permittedLogLevels, LocalDateTime startDate, LocalDateTime endDate) {
+    this.permittedLogLevels = (permittedLogLevels == null || permittedLogLevels.isEmpty())
+        ? allLogLevels() : permittedLogLevels;
+    this.startDate = startDate;
+    this.endDate = endDate;
+  }
+
+  public void startNewFile() {
+    this.resultOfPreviousLine = LineFilterResult.LINE_ACCEPTED;
+  }
+
+  public LineFilterResult acceptsLine(String logLine) {
+    LogLevelExtractor.Result result = LogLevelExtractor.extract(logLine);
+
+    return acceptsLogEntry(result);
+  }
+
+  protected LineFilterResult acceptsLogEntry(LogLevelExtractor.Result result) {
+    if (result == null) {
+      return resultOfPreviousLine;
+    }
+
+    return acceptsLogEntry(result.getLogLevel(), result.getLogTimestamp());
+  }
+
+  protected LineFilterResult acceptsLogEntry(String logLevel, LocalDateTime logTimestamp) {
+    if (logTimestamp == null || logLevel == null) {
+      throw new IllegalArgumentException();
+    }
+
+    LineFilterResult result;
+
+    if (endDate != null && logTimestamp.isAfter(endDate)) {
+      result = LineFilterResult.REMAINDER_OF_FILE_REJECTED;
+    } else if (startDate != null && logTimestamp.isBefore(startDate)) {
+      result = LineFilterResult.LINE_REJECTED;
+    } else {
+      result = permittedLogLevels.contains(logLevel) ? LineFilterResult.LINE_ACCEPTED
+          : LineFilterResult.LINE_REJECTED;
+    }
+
+    resultOfPreviousLine = result;
+
+    return result;
+  }
+
+  public boolean acceptsFile(Path file) {
+    if (startDate == null) {
+      return true;
+    }
+    try {
+      return (getEndTimeOf(file).isAfter(startDate));
+    } catch (IOException e) {
+      LOGGER.error("Unable to determine lastModified time", e);
+      return true;
+    }
+  }
+
+  private static LocalDateTime getEndTimeOf(Path file) throws IOException {
+    long lastModifiedMillis = file.toFile().lastModified();
+    return Instant.ofEpochMilli(lastModifiedMillis).atZone(ZoneId.systemDefault())
+        .toLocalDateTime();
+  }
+
+  public static Set<String> allLogLevels() {
+    return Arrays.stream(InternalLogWriter.levelNames).collect(toSet());
+  }
+}

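LogFilter is deliberately stateful: a line with no parseable prefix (for example, the continuation lines of a stack trace) returns resultOfPreviousLine, so a multi-line entry is kept or dropped as a unit, and startNewFile() resets that state between files. A minimal sketch of how a caller might drive the filter, assuming a hypothetical server1.log and an "info"-only level set (neither appears in this commit):

    import java.io.BufferedReader;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.time.LocalDateTime;
    import java.util.Collections;

    import org.apache.geode.management.internal.cli.util.LogFilter;

    public class LogFilterSketch {
      public static void main(String[] args) throws Exception {
        // Keep only "info" entries from the last day; a null end date means no upper bound.
        LogFilter filter =
            new LogFilter(Collections.singleton("info"), LocalDateTime.now().minusDays(1), null);

        Path logFile = Paths.get("server1.log"); // hypothetical input file
        if (!filter.acceptsFile(logFile)) {
          return; // file was last modified before the start date; skip it entirely
        }

        filter.startNewFile(); // reset the decision carried over from any previous file
        try (BufferedReader reader = Files.newBufferedReader(logFile)) {
          String line;
          while ((line = reader.readLine()) != null) {
            LogFilter.LineFilterResult decision = filter.acceptsLine(line);
            if (decision == LogFilter.LineFilterResult.REMAINDER_OF_FILE_REJECTED) {
              break; // every later entry is past the end date; stop reading
            }
            if (decision == LogFilter.LineFilterResult.LINE_ACCEPTED) {
              System.out.println(line);
            }
          }
        }
      }
    }
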
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogLevelExtractor.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogLevelExtractor.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogLevelExtractor.java
new file mode 100644
index 0000000..6c15d15
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/LogLevelExtractor.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class LogLevelExtractor {
+  private static final Pattern LOG_PATTERN =
+      Pattern.compile("^\\[(\\S*)\\s+([\\d\\/]+)\\s+([\\d:\\.]+)\\s+(\\S+)");
+
+  private static final DateTimeFormatter LOG_TIMESTAMP_FORMATTER =
+      DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm:ss.SSS zzz");
+  private static final String SPACE = " ";
+
+  public static Result extract(String logLine) {
+    Matcher m = LOG_PATTERN.matcher(logLine);
+    if (!m.find()) {
+      return null;
+    }
+
+    String logLevel = m.group(1);
+    String logTimestamp = m.group(2) + SPACE + m.group(3) + SPACE + m.group(4);
+
+    LocalDateTime timestamp = LocalDateTime.parse(logTimestamp, LOG_TIMESTAMP_FORMATTER);
+
+    return new Result(logLevel, timestamp);
+  }
+
+  public static class Result {
+    private String logLevel;
+    private LocalDateTime logTimestamp;
+
+    public Result(String logLevel, LocalDateTime logTimestamp) {
+      this.logLevel = logLevel;
+      this.logTimestamp = logTimestamp;
+    }
+
+    public String getLogLevel() {
+      return logLevel;
+    }
+
+    public LocalDateTime getLogTimestamp() {
+      return logTimestamp;
+    }
+
+  }
+}
+

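extract() assumes the standard Geode log prefix, [level yyyy/MM/dd HH:mm:ss.SSS zzz ...], and returns null for anything else, which is what lets LogFilter treat unprefixed lines as continuations of the previous entry. A hedged example against a made-up log line (GMT is used because short zone names are the trickiest part of the zzz parse):

    import org.apache.geode.management.internal.cli.util.LogLevelExtractor;

    public class LogLevelExtractorSketch {
      public static void main(String[] args) {
        // Made-up line in the Geode prefix shape: [level date time zone member <thread> tid].
        LogLevelExtractor.Result result = LogLevelExtractor
            .extract("[info 2017/02/23 09:45:00.123 GMT server1 <main> tid=0x1] Region created");
        System.out.println(result.getLogLevel());     // info
        System.out.println(result.getLogTimestamp()); // 2017-02-23T09:45:00.123
        // Continuation lines carry no prefix and yield null instead of a Result.
        System.out.println(LogLevelExtractor.extract("  at java.lang.Thread.run(...)") == null);
      }
    }
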
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
index 8d2ef45..67d5473 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
@@ -14,17 +14,30 @@
  */
 package org.apache.geode.management.internal.cli.util;
 
+import static java.util.stream.Collectors.toList;
+
+import java.io.BufferedReader;
 import java.io.File;
 
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FilenameFilter;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.io.PrintWriter;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.internal.logging.MergeLogFiles;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.GfshParser;
+import org.apache.geode.management.internal.cli.LogWrapper;
 import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.result.ResultBuilder;
 
 /**
  * 
@@ -32,17 +45,54 @@ import org.apache.geode.management.internal.cli.i18n.CliStrings;
  */
 
 public class MergeLogs {
-  /**
-   * @param args
-   */
+
+  public static void mergeLogsInNewProcess(Path logDirectory) {
+    // create a new process for merging
+    LogWrapper.getInstance().fine("Exporting logs merging logs" + logDirectory);
+    List<String> commandList = new ArrayList<String>();
+    commandList.add(System.getProperty("java.home") + File.separatorChar + "bin"
+        + File.separatorChar + "java");
+    commandList.add("-classpath");
+    commandList.add(System.getProperty("java.class.path", "."));
+    commandList.add(MergeLogs.class.getName());
+
+    commandList
+        .add(logDirectory.toAbsolutePath().toString());
+
+    ProcessBuilder procBuilder = new ProcessBuilder(commandList);
+    // Capture the merge process output so we can verify that the merge succeeded.
+    StringBuilder output = new StringBuilder();
+    try {
+      LogWrapper.getInstance().fine("Exporting logs now merging logs");
+      Process mergeProcess = procBuilder.redirectErrorStream(true).start();
+
+      mergeProcess.waitFor();
+
+      InputStream inputStream = mergeProcess.getInputStream();
+      BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
+      String line = null;
+
+      while ((line = br.readLine()) != null) {
+        output.append(line).append(GfshParser.LINE_SEPARATOR);
+      }
+      mergeProcess.destroy();
+    } catch (Exception e) {
+      LogWrapper.getInstance().severe(e.getMessage());
+    }
+    if (output.toString().contains("Merged logs to: ")) {
+      LogWrapper.getInstance().fine("Exporting logs Sucessfully merged logs");
+    } else {
+      LogWrapper.getInstance().severe("Could not merge");
+    }
+  }
 
   public static void main(String[] args) {
     if (args.length < 1 || args.length > 1) {
       throw new IllegalArgumentException("Requires only 1  arguments : <targetDirName>");
     }
     try {
-      String result = mergeLogFile(args[0]);
-      System.out.println(result);
+      String result = mergeLogFile(args[0]).getCanonicalPath();
+      System.out.println("Merged logs to: " + result);
     } catch (Exception e) {
       System.out.println(e.getMessage());
     }
@@ -50,27 +100,32 @@ public class MergeLogs {
 
   }
 
-  static String mergeLogFile(String dirName) throws Exception {
-    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
-    File dir = new File(dirName);
-    String[] logsToMerge = dir.list();
-    InputStream[] logFiles = new FileInputStream[logsToMerge.length];
+  protected static List<File> findLogFilesToMerge(File dir) {
+    return FileUtils.listFiles(dir, new String[] {"log"}, true).stream().collect(toList());
+  }
+
+  static File mergeLogFile(String dirName) throws Exception {
+    Path dir = Paths.get(dirName);
+    List<File> logsToMerge = findLogFilesToMerge(dir.toFile());
+    InputStream[] logFiles = new FileInputStream[logsToMerge.size()];
     String[] logFileNames = new String[logFiles.length];
-    for (int i = 0; i < logsToMerge.length; i++) {
+    for (int i = 0; i < logsToMerge.size(); i++) {
       try {
-        logFiles[i] = new FileInputStream(dirName + File.separator + logsToMerge[i]);
-        logFileNames[i] = dirName + File.separator + logsToMerge[i];
+        logFiles[i] = new FileInputStream(logsToMerge.get(i));
+        logFileNames[i] = dir.relativize(logsToMerge.get(i).toPath()).toString();
       } catch (FileNotFoundException e) {
         throw new Exception(
-            logsToMerge[i] + " " + CliStrings.EXPORT_LOGS__MSG__FILE_DOES_NOT_EXIST);
+            logsToMerge.get(i) + " " + CliStrings.EXPORT_LOGS__MSG__FILE_DOES_NOT_EXIST);
       }
     }
 
     PrintWriter mergedLog = null;
+    File mergedLogFile = null;
     try {
       String mergeLog =
-          dirName + File.separator + "merge_" + sdf.format(new java.util.Date()) + ".log";
-      mergedLog = new PrintWriter(mergeLog);
+          dirName + File.separator + "merge_" + new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new java.util.Date()) + ".log";
+      mergedLogFile = new File(mergeLog);
+      mergedLog = new PrintWriter(mergedLogFile);
       boolean flag = MergeLogFiles.mergeLogFiles(logFiles, logFileNames, mergedLog);
     } catch (FileNotFoundException e) {
       throw new Exception(
@@ -79,7 +134,7 @@ public class MergeLogs {
       throw new Exception("Exception in creating PrintWriter in MergeLogFiles" + e.getMessage());
     }
 
-    return "Sucessfully merged logs";
+    return mergedLogFile;
   }
 
 

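mergeLogsInNewProcess forks a fresh JVM with the caller's java.home and classpath and runs this class's main(), then decides success by scanning the child's stdout for the "Merged logs to: " marker that main() now prints; that is why main() was changed to print the merged file's canonical path. A sketch of the one public entry point, with an illustrative directory name:

    import java.nio.file.Paths;

    import org.apache.geode.management.internal.cli.util.MergeLogs;

    public class MergeLogsSketch {
      public static void main(String[] args) {
        // Recursively collects every *.log under exportedLogs/ and merges them into
        // a single merge_<timestamp>.log written to that same directory.
        MergeLogs.mergeLogsInNewProcess(Paths.get("exportedLogs"));
      }
    }
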
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
index 81161d6..e32803a 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
@@ -41,8 +41,12 @@ import org.apache.commons.io.IOUtils;
 public class ZipUtils {
 
   public static void zipDirectory(Path sourceDirectory, Path targetFile) throws IOException {
-    Path p = Files.createFile(targetFile);
-    try (ZipOutputStream zs = new ZipOutputStream(Files.newOutputStream(p))) {
+    Path parentDir = targetFile.getParent();
+    if (parentDir != null && !parentDir.toFile().exists()) {
+      parentDir.toFile().mkdirs();
+    }
+
+    try (ZipOutputStream zs = new ZipOutputStream(Files.newOutputStream(targetFile))) {
       Files.walk(sourceDirectory).filter(path -> !Files.isDirectory(path)).forEach(path -> {
         ZipEntry zipEntry = new ZipEntry(sourceDirectory.relativize(path).toString());
         try {

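Before this change zipDirectory called Files.createFile first, so it failed when the target's parent directory did not yet exist (or when the file already did); it now creates any missing parent directories and writes the zip directly. A small sketch with illustrative paths:

    import java.nio.file.Paths;

    import org.apache.geode.management.internal.configuration.utils.ZipUtils;

    public class ZipUtilsSketch {
      public static void main(String[] args) throws Exception {
        // Entries are named relative to server1/; exported/logs/ is created on demand.
        ZipUtils.zipDirectory(Paths.get("server1"), Paths.get("exported", "logs", "server1.zip"));
      }
    }
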
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
new file mode 100644
index 0000000..43ee742
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
@@ -0,0 +1,363 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.commands;
+
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toSet;
+import static org.apache.geode.management.internal.cli.commands.MiscellaneousCommands.FORMAT;
+import static org.apache.geode.management.internal.cli.commands.MiscellaneousCommands.ONLY_DATE_FORMAT;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.geode.cache.Cache;
+import org.apache.geode.distributed.ConfigurationProperties;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.management.internal.cli.functions.ExportLogsFunction;
+import org.apache.geode.management.internal.cli.result.CommandResult;
+import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
+import org.apache.geode.management.internal.configuration.utils.ZipUtils;
+import org.apache.geode.test.dunit.IgnoredException;
+import org.apache.geode.test.dunit.rules.GfshShellConnectionRule;
+import org.apache.geode.test.dunit.rules.Locator;
+import org.apache.geode.test.dunit.rules.LocatorServerStartupRule;
+import org.apache.geode.test.dunit.rules.Member;
+import org.apache.geode.test.dunit.rules.Server;
+import org.apache.logging.log4j.Logger;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.nio.charset.Charset;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Properties;
+import java.util.Set;
+import java.util.stream.Stream;
+
+
+public class ExportLogsDUnit {
+  private static final String ERROR_LOG_PREFIX = "[IGNORE]";
+
+  @Rule
+  public LocatorServerStartupRule lsRule = new LocatorServerStartupRule();
+
+  @Rule
+  public GfshShellConnectionRule gfshConnector = new GfshShellConnectionRule();
+
+  private Locator locator;
+  private Server server1;
+  private Server server2;
+
+  private Map<Member, List<LogLine>> expectedMessages;
+
+  @Before
+  public void setup() throws Exception {
+    Properties properties = new Properties();
+    properties.setProperty(ConfigurationProperties.LOG_LEVEL, "debug");
+
+    locator = lsRule.startLocatorVM(0, properties);
+    server1 = lsRule.startServerVM(1, properties, locator.getPort());
+    server2 = lsRule.startServerVM(2, properties, locator.getPort());
+
+    IgnoredException.addIgnoredException(ERROR_LOG_PREFIX);
+
+    expectedMessages = new HashMap<>();
+    expectedMessages.put(locator, listOfLogLines(locator, "info", "error", "debug"));
+    expectedMessages.put(server1, listOfLogLines(server1, "info", "error", "debug"));
+    expectedMessages.put(server2, listOfLogLines(server2, "info", "error", "debug"));
+
+    // log the messages in each of the members
+    for (Member member : expectedMessages.keySet()) {
+      List<LogLine> logLines = expectedMessages.get(member);
+
+      member.invoke(() -> {
+        Logger logger = LogService.getLogger();
+        logLines.forEach((LogLine logLine) -> logLine.writeLog(logger));
+      });
+    }
+
+    gfshConnector.connectAndVerify(locator);
+  }
+
+  @Test
+  public void startAndEndDateCanExcludeLogs() throws Exception {
+    ZonedDateTime now = LocalDateTime.now().atZone(ZoneId.systemDefault());
+    ZonedDateTime yesterday = now.minusDays(1);
+    ZonedDateTime twoDaysAgo = now.minusDays(2);
+
+    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(ONLY_DATE_FORMAT);
+
+    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
+    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(twoDaysAgo));
+    commandStringBuilder.addOption("end-time", dateTimeFormatter.format(yesterday));
+    commandStringBuilder.addOption("log-level", "debug");
+    commandStringBuilder.addOption("dir", "someDir");
+
+    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
+
+    Set<String> acceptedLogLevels = new HashSet<>();
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
+  public void startAndEndDateCanIncludeLogs() throws Exception {
+    ZonedDateTime now = LocalDateTime.now().atZone(ZoneId.systemDefault());
+    ZonedDateTime yesterday = now.minusDays(1);
+    ZonedDateTime tomorrow = now.plusDays(1);
+
+    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(ONLY_DATE_FORMAT);
+
+    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
+    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(yesterday));
+    commandStringBuilder.addOption("end-time", dateTimeFormatter.format(tomorrow));
+    commandStringBuilder.addOption("log-level", "debug");
+    commandStringBuilder.addOption("dir", "someDir");
+
+    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
+
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
+  public void testExportWithStartAndEndDateTimeFiltering() throws Exception {
+    ZonedDateTime cutoffTime = LocalDateTime.now().atZone(ZoneId.systemDefault());
+
+    String messageAfterCutoffTime = "[this message should not show up since it is after cutoffTime]";
+    LogLine logLineAfterCutoffTime = new LogLine(messageAfterCutoffTime, "info", true);
+    server1.invoke(() -> {
+      Logger logger = LogService.getLogger();
+      logLineAfterCutoffTime.writeLog(logger);
+    });
+
+    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(FORMAT);
+    String cutoffTimeString = dateTimeFormatter.format(cutoffTime);
+
+    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
+    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(cutoffTime.minusHours(1)));
+    commandStringBuilder.addOption("end-time", cutoffTimeString);
+    commandStringBuilder.addOption("log-level", "debug");
+    commandStringBuilder.addOption("dir", "someDir");
+
+    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
+
+    expectedMessages.get(server1).add(logLineAfterCutoffTime);
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
+  public void testExportWithThresholdLogLevelFilter() throws Exception {
+
+    CommandResult result = gfshConnector.executeAndVerifyCommand(
+        "export logs --log-level=info --only-log-level=false --dir=" + lsRule.getTempFolder()
+            .getRoot().getCanonicalPath());
+
+    Set<String> acceptedLogLevels = Stream.of("info", "error").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+
+  }
+
+  @Test
+  public void testExportWithExactLogLevelFilter() throws Exception {
+    CommandResult result = gfshConnector.executeAndVerifyCommand(
+        "export logs --log-level=info --only-log-level=true --dir=" + lsRule.getTempFolder()
+            .getRoot().getCanonicalPath());
+
+
+    Set<String> acceptedLogLevels = Stream.of("info").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
+  public void testExportWithNoFilters() throws Exception {
+    CommandResult result = gfshConnector.executeAndVerifyCommand(
+        "export logs --dir=someDir");
+
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+
+    // Ensure export logs region gets cleaned up
+    server1.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
+    server2.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
+    locator.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
+  }
+
+  @Test
+  public void exportLogsRegionIsCleanedUpProperly() throws IOException, ClassNotFoundException {
+    locator.invoke(() -> {
+      ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
+    });
+
+    server1.invoke(() -> {
+      ExportLogsFunction.createOrGetExistingExportLogsRegion(false);
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
+    });
+
+    locator.invoke(() -> {
+      ExportLogsFunction.destroyExportLogsRegion();
+
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+    });
+
+    server1.invoke(() -> {
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+    });
+  }
+
+
+  public void verifyZipFileContents(Set<String> acceptedLogLevels)
+      throws IOException {
+    File unzippedLogFileDir = unzipExportedLogs();
+
+    Set<File> dirsFromZipFile =
+        Stream.of(unzippedLogFileDir.listFiles()).filter(File::isDirectory).collect(toSet());
+    assertThat(dirsFromZipFile).hasSize(expectedMessages.keySet().size());
+
+    Set<String> expectedDirNames =
+        expectedMessages.keySet().stream().map(Member::getName).collect(toSet());
+    Set<String> actualDirNames = dirsFromZipFile.stream().map(File::getName).collect(toSet());
+    assertThat(actualDirNames).isEqualTo(expectedDirNames);
+
+    System.out.println("Unzipped artifacts:");
+    for (File dir : dirsFromZipFile) {
+      verifyLogFileContents(acceptedLogLevels, dir);
+    }
+  }
+
+  public void verifyLogFileContents(Set<String> acceptedLogLevels, File dirForMember)
+      throws IOException {
+
+    String memberName = dirForMember.getName();
+    Member member = expectedMessages.keySet().stream()
+        .filter((Member aMember) -> aMember.getName().equals(memberName))
+        .findFirst()
+        .orElse(null);
+
+    assertThat(member).isNotNull();
+
+    Set<String> fileNamesInDir =
+        Stream.of(dirForMember.listFiles()).map(File::getName).collect(toSet());
+
+    System.out.println(dirForMember.getCanonicalPath() + " : " + fileNamesInDir);
+
+    File logFileForMember = new File(dirForMember, memberName + ".log");
+    assertThat(logFileForMember).exists();
+    assertThat(fileNamesInDir).hasSize(1);
+
+    String logFileContents =
+        FileUtils.readLines(logFileForMember, Charset.defaultCharset()).stream()
+            .collect(joining("\n"));
+
+    for (LogLine logLine : expectedMessages.get(member)) {
+      boolean shouldExpectLogLine = acceptedLogLevels.contains(logLine.level) && !logLine.shouldBeIgnoredDueToTimestamp;
+
+      if (shouldExpectLogLine) {
+        assertThat(logFileContents).contains(logLine.getMessage());
+      } else {
+        assertThat(logFileContents).doesNotContain(logLine.getMessage());
+      }
+    }
+
+  }
+
+  private File unzipExportedLogs() throws IOException {
+    File locatorWorkingDir = locator.getWorkingDir();
+    List<File> filesInDir = Stream.of(locatorWorkingDir.listFiles()).collect(toList());
+    assertThat(filesInDir).isNotEmpty();
+
+
+    List<File> zipFilesInDir = Stream.of(locatorWorkingDir.listFiles())
+        .filter(f -> f.getName().endsWith(".zip")).collect(toList());
+    assertThat(zipFilesInDir).describedAs(filesInDir.stream().map(File::getAbsolutePath).collect(joining(","))).hasSize(1);
+
+    File unzippedLogFileDir = lsRule.getTempFolder().newFolder("unzippedLogs");
+    ZipUtils.unzip(zipFilesInDir.get(0).getCanonicalPath(), unzippedLogFileDir.getCanonicalPath());
+    return unzippedLogFileDir;
+  }
+
+  private List<LogLine> listOfLogLines(Member member, String... levels) {
+    return Stream.of(levels).map(level -> new LogLine(member, level)).collect(toList());
+  }
+
+  private static void verifyExportLogsRegionWasDestroyed() {
+    Cache cache = GemFireCacheImpl.getInstance();
+    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+  }
+
+  public static class LogLine implements Serializable {
+    String level;
+    String message;
+    boolean shouldBeIgnoredDueToTimestamp;
+
+    public LogLine(String message, String level, boolean shouldBeIgnoredDueToTimestamp) {
+      this.message = message;
+      this.level = level;
+      this.shouldBeIgnoredDueToTimestamp = shouldBeIgnoredDueToTimestamp;
+    }
+
+    public LogLine(Member member, String level) {
+      this.level = level;
+      this.message = buildMessage(member.getName());
+    }
+
+    public String getMessage() {
+      return message;
+    }
+
+    private String buildMessage(String memberName) {
+      StringBuilder stringBuilder = new StringBuilder();
+      if (Objects.equals(level, "error")) {
+        stringBuilder.append(ERROR_LOG_PREFIX);
+      }
+      stringBuilder.append(level);
+
+      return stringBuilder.append(memberName).toString();
+    }
+
+
+    public void writeLog(Logger logger) {
+      switch (this.level) {
+        case "info":
+          logger.info(getMessage());
+          break;
+        case "error":
+          logger.error(getMessage());
+          break;
+        case "debug":
+          logger.debug(getMessage());
+      }
+    }
+  }
+}

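The new DUnit drives export logs end to end through gfsh rather than calling MiscellaneousCommands directly. The command strings it builds have this shape (the timestamp formats come from FORMAT and ONLY_DATE_FORMAT in MiscellaneousCommands, which this hunk does not show, so the start/end values below are placeholders):

    export logs --dir=someDir --log-level=debug --start-time=<start> --end-time=<end>
    export logs --dir=<dir> --log-level=info --only-log-level=true

As the assertions above spell out, the locator ends up with exactly one .zip in its working directory, containing one directory per member and a single <memberName>.log inside each.
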
http://git-wip-us.apache.org/repos/asf/geode/blob/4c6f3695/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
deleted file mode 100644
index cb9a8c6..0000000
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.geode.management.internal.cli.commands;
-
-import static org.apache.geode.test.dunit.Assert.assertEquals;
-import static org.apache.geode.test.dunit.Assert.fail;
-import static org.apache.geode.test.dunit.LogWriterUtils.getLogWriter;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.geode.cache.Cache;
-import org.apache.geode.cache.Region;
-import org.apache.geode.cache.RegionFactory;
-import org.apache.geode.cache.RegionShortcut;
-import org.apache.geode.internal.logging.LogWriterImpl;
-import org.apache.geode.management.cli.Result;
-import org.apache.geode.management.internal.cli.result.CommandResult;
-import org.apache.geode.test.dunit.Host;
-import org.apache.geode.test.dunit.SerializableRunnable;
-import org.apache.geode.test.dunit.VM;
-import org.apache.geode.test.junit.categories.DistributedTest;
-import org.apache.geode.test.junit.categories.FlakyTest;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-/**
- * Dunit class for testing gemfire function commands : export logs
- */
-@Category(DistributedTest.class)
-public class MiscellaneousCommandsExportLogsPart1DUnitTest extends CliCommandTestBase {
-
-  private static final long serialVersionUID = 1L;
-
-  void setupForExportLogs() {
-    final VM vm1 = Host.getHost(0).getVM(1);
-    setUpJmxManagerOnVm0ThenConnect(null);
-
-    vm1.invoke(new SerializableRunnable() {
-      public void run() {
-        // no need to close cache as it will be closed as part of teardown2
-        Cache cache = getCache();
-
-        RegionFactory<Integer, Integer> dataRegionFactory =
-            cache.createRegionFactory(RegionShortcut.PARTITION);
-        Region region = dataRegionFactory.create("testRegion");
-        for (int i = 0; i < 5; i++) {
-          region.put("key" + (i + 200), "value" + (i + 200));
-        }
-      }
-    });
-  }
-
-  String getCurrentTimeString() {
-    SimpleDateFormat sf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS_z");
-    Date startDate = new Date(System.currentTimeMillis());
-    String formattedStartDate = sf.format(startDate);
-    return ("_" + formattedStartDate);
-  }
-
-  @Test
-  public void testExportLogs() throws IOException {
-    Date startDate = new Date(System.currentTimeMillis() - 2 * 60 * 1000);
-    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
-    String start = sf.format(startDate);
-
-    Date enddate = new Date(System.currentTimeMillis() + 2 * 60 * 60 * 1000);
-    String end = sf.format(enddate);
-    String dir = getCurrentTimeString();
-
-    setupForExportLogs();
-    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);
-
-    MiscellaneousCommands misc = new MiscellaneousCommands();
-    getCache();
-
-    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogs" + dir, null, null, logLevel,
-        false, false, start, end, 1);
-
-    getLogWriter().info("testExportLogs command result =" + cmdResult);
-
-    if (cmdResult != null) {
-      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
-      getLogWriter().info("testExportLogs cmdStringRsult=" + cmdStringRsult);
-      assertEquals(Result.Status.OK, cmdResult.getStatus());
-    } else {
-      fail("testExportLogs failed as did not get CommandResult");
-    }
-    FileUtils.deleteDirectory(new File("./testExportLogs" + dir));
-  }
-
-  @Category(FlakyTest.class) // GEODE-1477 (http)
-  @Test
-  public void testExportLogsForMerge() throws IOException {
-    setupForExportLogs();
-    Date startDate = new Date(System.currentTimeMillis() - 2 * 60 * 1000);
-    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
-    String start = sf.format(startDate);
-
-    Date enddate = new Date(System.currentTimeMillis() + 2 * 60 * 60 * 1000);
-    String end = sf.format(enddate);
-    String dir = getCurrentTimeString();
-
-    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);
-
-    MiscellaneousCommands misc = new MiscellaneousCommands();
-    getCache();
-
-    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForMerge" + dir, null, null,
-        logLevel, false, true, start, end, 1);
-    getLogWriter().info("testExportLogsForMerge command=" + cmdResult);
-
-    if (cmdResult != null) {
-      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
-      getLogWriter().info("testExportLogsForMerge cmdStringRsult=" + cmdStringRsult);
-
-      assertEquals(Result.Status.OK, cmdResult.getStatus());
-    } else {
-      fail("testExportLogsForMerge failed as did not get CommandResult");
-    }
-    FileUtils.deleteDirectory(new File("./testExportLogsForMerge" + dir));
-  }
-}