geode-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jinmeil...@apache.org
Subject [6/7] geode git commit: GEODE-2267: add validation to the arguments and include export stats in the command
Date Sat, 04 Mar 2017 00:04:16 GMT
http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/main/java/org/apache/geode/management/internal/web/controllers/MiscellaneousCommandsController.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/web/controllers/MiscellaneousCommandsController.java b/geode-core/src/main/java/org/apache/geode/management/internal/web/controllers/MiscellaneousCommandsController.java
index e2f5307..d19aee1 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/web/controllers/MiscellaneousCommandsController.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/web/controllers/MiscellaneousCommandsController.java
@@ -16,12 +16,7 @@ package org.apache.geode.management.internal.web.controllers;
 
 import org.apache.geode.internal.lang.StringUtils;
 import org.apache.geode.management.internal.cli.i18n.CliStrings;
-import org.apache.geode.management.internal.cli.result.ResultBuilder;
 import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
-import org.springframework.core.io.InputStreamResource;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestMapping;
@@ -29,9 +24,6 @@ import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.ResponseBody;
 
-import java.io.File;
-import java.io.FileInputStream;
-
 /**
  * The MiscellaneousCommandsController class implements GemFire Management REST API web service
  * endpoints for the Gfsh Miscellaneous Commands.
@@ -51,68 +43,6 @@ import java.io.FileInputStream;
 @RequestMapping(AbstractCommandsController.REST_API_VERSION)
 @SuppressWarnings("unused")
 public class MiscellaneousCommandsController extends AbstractCommandsController {
-
-  @RequestMapping(method = RequestMethod.GET, value = "/logs")
-  public ResponseEntity<InputStreamResource> exportLogs(
-      @RequestParam(value = CliStrings.EXPORT_LOGS__DIR, required = false) final String directory,
-      @RequestParam(value = CliStrings.EXPORT_LOGS__GROUP, required = false) final String[] groups,
-      @RequestParam(value = CliStrings.EXPORT_LOGS__MEMBER,
-          required = false) final String memberNameId,
-      @RequestParam(value = CliStrings.EXPORT_LOGS__LOGLEVEL,
-          required = false) final String logLevel,
-      @RequestParam(value = CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL,
-          defaultValue = "false") final Boolean onlyLogLevel,
-      @RequestParam(value = CliStrings.EXPORT_LOGS__MERGELOG,
-          defaultValue = "false") final Boolean mergeLog,
-      @RequestParam(value = CliStrings.EXPORT_LOGS__STARTTIME,
-          required = false) final String startTime,
-      @RequestParam(value = CliStrings.EXPORT_LOGS__ENDTIME,
-          required = false) final String endTime) {
-    final CommandStringBuilder command = new CommandStringBuilder(CliStrings.EXPORT_LOGS);
-
-    command.addOption(CliStrings.EXPORT_LOGS__DIR, decode(directory));
-
-    if (hasValue(groups)) {
-      command.addOption(CliStrings.EXPORT_LOGS__GROUP,
-          StringUtils.concat(groups, StringUtils.COMMA_DELIMITER));
-    }
-
-    if (hasValue(memberNameId)) {
-      command.addOption(CliStrings.EXPORT_LOGS__MEMBER, memberNameId);
-    }
-
-    if (hasValue(logLevel)) {
-      command.addOption(CliStrings.EXPORT_LOGS__LOGLEVEL, logLevel);
-    }
-
-    command.addOption(CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL,
-        String.valueOf(Boolean.TRUE.equals(onlyLogLevel)));
-    command.addOption(CliStrings.EXPORT_LOGS__MERGELOG,
-        String.valueOf(Boolean.TRUE.equals(mergeLog)));
-
-    if (hasValue(startTime)) {
-      command.addOption(CliStrings.EXPORT_LOGS__STARTTIME, startTime);
-    }
-
-    if (hasValue(endTime)) {
-      command.addOption(CliStrings.EXPORT_LOGS__ENDTIME, endTime);
-    }
-
-    // the result is json string from CommandResult
-    String result = processCommand(command.toString());
-
-    // parse the result to get the file path
-    String filePath = ResultBuilder.fromJson(result).nextLine().trim();
-
-    HttpHeaders respHeaders = new HttpHeaders();
-    try {
-      InputStreamResource isr = new InputStreamResource(new FileInputStream(new File(filePath)));
-      return new ResponseEntity<InputStreamResource>(isr, respHeaders, HttpStatus.OK);
-    } catch (Exception ex) {
-      throw new RuntimeException("IOError writing file to output stream", ex);
-    }
-  }
-
   // TODO determine whether Async functionality is required
   @RequestMapping(method = RequestMethod.GET, value = "/stacktraces")
   @ResponseBody

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/internal/statistics/SimpleStatSamplerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/internal/statistics/SimpleStatSamplerIntegrationTest.java b/geode-core/src/test/java/org/apache/geode/internal/statistics/SimpleStatSamplerIntegrationTest.java
index 167fa3d..ae052c1 100755
--- a/geode-core/src/test/java/org/apache/geode/internal/statistics/SimpleStatSamplerIntegrationTest.java
+++ b/geode-core/src/test/java/org/apache/geode/internal/statistics/SimpleStatSamplerIntegrationTest.java
@@ -14,12 +14,18 @@
  */
 package org.apache.geode.internal.statistics;
 
-import static org.junit.Assert.*;
-
-import java.io.File;
-import java.lang.reflect.Method;
-import java.util.List;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
+import org.apache.geode.CancelCriterion;
+import org.apache.geode.Statistics;
+import org.apache.geode.StatisticsType;
+import org.apache.geode.internal.net.SocketCreator;
+import org.apache.geode.internal.stats50.VMStats50;
+import org.apache.geode.test.junit.categories.IntegrationTest;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -29,12 +35,9 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TemporaryFolder;
 import org.junit.rules.TestName;
 
-import org.apache.geode.CancelCriterion;
-import org.apache.geode.Statistics;
-import org.apache.geode.StatisticsType;
-import org.apache.geode.internal.net.SocketCreator;
-import org.apache.geode.internal.stats50.VMStats50;
-import org.apache.geode.test.junit.categories.IntegrationTest;
+import java.io.File;
+import java.lang.reflect.Method;
+import java.util.List;
 
 /**
  * Integration tests for {@link SimpleStatSampler}.

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/ConnectToLocatorSSLDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/ConnectToLocatorSSLDUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/ConnectToLocatorSSLDUnitTest.java
index 83a367e..101c8f5 100644
--- a/geode-core/src/test/java/org/apache/geode/management/ConnectToLocatorSSLDUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/ConnectToLocatorSSLDUnitTest.java
@@ -26,10 +26,10 @@ import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_S
 import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_SSL_KEYSTORE_TYPE;
 import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_SSL_TRUSTSTORE;
 import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_SSL_TRUSTSTORE_PASSWORD;
+import static org.apache.geode.distributed.ConfigurationProperties.SSL_CIPHERS;
 import static org.apache.geode.distributed.ConfigurationProperties.SSL_ENABLED_COMPONENTS;
 import static org.apache.geode.distributed.ConfigurationProperties.SSL_KEYSTORE;
 import static org.apache.geode.distributed.ConfigurationProperties.SSL_KEYSTORE_PASSWORD;
-import static org.apache.geode.distributed.ConfigurationProperties.SSL_KEYSTORE_TYPE;
 import static org.apache.geode.distributed.ConfigurationProperties.SSL_PROTOCOLS;
 import static org.apache.geode.distributed.ConfigurationProperties.SSL_TRUSTSTORE;
 import static org.apache.geode.distributed.ConfigurationProperties.SSL_TRUSTSTORE_PASSWORD;
@@ -100,10 +100,11 @@ public class ConnectToLocatorSSLDUnitTest {
     securityProps.setProperty(SSL_ENABLED_COMPONENTS, SecurableCommunicationChannels.JMX);
     securityProps.setProperty(SSL_KEYSTORE, jks.getCanonicalPath());
     securityProps.setProperty(SSL_KEYSTORE_PASSWORD, "password");
-    securityProps.setProperty(SSL_KEYSTORE_TYPE, "JKS");
+    // securityProps.setProperty(SSL_KEYSTORE_TYPE, "JKS");
     securityProps.setProperty(SSL_TRUSTSTORE, jks.getCanonicalPath());
     securityProps.setProperty(SSL_TRUSTSTORE_PASSWORD, "password");
-    securityProps.setProperty(SSL_PROTOCOLS, "TLSv1.2,TLSv1.1");
+    securityProps.setProperty(SSL_PROTOCOLS, "TLSv1.2");
+    securityProps.setProperty(SSL_CIPHERS, "any");
 
     setUpLocatorAndConnect(securityProps);
   }

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/AbstractCliAroundInterceptorJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/AbstractCliAroundInterceptorJUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/AbstractCliAroundInterceptorJUnitTest.java
index c938f07..82fd4a6 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/AbstractCliAroundInterceptorJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/AbstractCliAroundInterceptorJUnitTest.java
@@ -23,6 +23,8 @@ import org.junit.experimental.categories.Category;
 import org.apache.geode.management.cli.Result;
 import org.apache.geode.test.junit.categories.UnitTest;
 
+import java.nio.file.Path;
+
 /**
  * AbstractCliAroundInterceptor Tester.
  */
@@ -38,7 +40,8 @@ public class AbstractCliAroundInterceptorJUnitTest {
       }
 
       @Override
-      public Result postExecution(final GfshParseResult parseResult, final Result commandResult) {
+      public Result postExecution(final GfshParseResult parseResult, final Result commandResult,
+          final Path tempFile) {
         return null;
       }
     };

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/HeadlessGfsh.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/HeadlessGfsh.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/HeadlessGfsh.java
index 76e986d..9ea22da 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/HeadlessGfsh.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/HeadlessGfsh.java
@@ -95,14 +95,18 @@ public class HeadlessGfsh implements ResultHandler {
 
   // TODO : Have non-blocking method also where we move executeCommand call to separate thread-pool
   public boolean executeCommand(String command) {
-    boolean status = false;
+    boolean success = false;
     try {
       outputString = null;
-      status = shell.executeScriptLine(command);
+      success = shell.executeScriptLine(command);
     } catch (Exception e) {
       outputString = e.getMessage();
     }
-    return status;
+    if (!success && shell.output != null) {
+      outputString = shell.output.toString();
+      shell.output.reset();
+    }
+    return success;
   }
 
   public int getCommandExecutionStatus() {

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/CliCommandTestBase.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/CliCommandTestBase.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/CliCommandTestBase.java
index 21426d6..165f664 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/CliCommandTestBase.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/CliCommandTestBase.java
@@ -14,22 +14,17 @@
  */
 package org.apache.geode.management.internal.cli.commands;
 
-import static org.apache.geode.distributed.ConfigurationProperties.*;
-import static org.apache.geode.test.dunit.Assert.*;
-import static org.apache.geode.test.dunit.LogWriterUtils.*;
-
-import java.io.IOException;
-import java.io.PrintStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Map;
-import java.util.Properties;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.geode.security.TestSecurityManager;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
+import static org.apache.geode.distributed.ConfigurationProperties.HTTP_SERVICE_PORT;
+import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER;
+import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_BIND_ADDRESS;
+import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_PORT;
+import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_START;
+import static org.apache.geode.distributed.ConfigurationProperties.NAME;
+import static org.apache.geode.test.dunit.Assert.assertEquals;
+import static org.apache.geode.test.dunit.Assert.assertFalse;
+import static org.apache.geode.test.dunit.Assert.assertNotNull;
+import static org.apache.geode.test.dunit.Assert.assertTrue;
+import static org.apache.geode.test.dunit.LogWriterUtils.getLogWriter;
 
 import org.apache.geode.cache.Cache;
 import org.apache.geode.internal.AvailablePortHelper;
@@ -41,13 +36,27 @@ import org.apache.geode.management.internal.cli.parser.CommandTarget;
 import org.apache.geode.management.internal.cli.result.CommandResult;
 import org.apache.geode.management.internal.cli.shell.Gfsh;
 import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
+import org.apache.geode.security.TestSecurityManager;
 import org.apache.geode.test.dunit.Host;
 import org.apache.geode.test.dunit.IgnoredException;
 import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
 import org.apache.geode.test.dunit.rules.DistributedRestoreSystemProperties;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Map;
+import java.util.Properties;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 /**
  * Base class for all the CLI/gfsh command dunit tests.
+ * 
+ * @deprecated use LocatorServerStartupRule and GfshShellConnectorRule instead.
  */
 public abstract class CliCommandTestBase extends JUnit4CacheTestCase {
 

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
deleted file mode 100644
index a64620a..0000000
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- *
- */
-
-package org.apache.geode.management.internal.cli.commands;
-
-import static java.util.stream.Collectors.joining;
-import static java.util.stream.Collectors.toList;
-import static java.util.stream.Collectors.toSet;
-import static org.apache.geode.management.internal.cli.commands.MiscellaneousCommands.FORMAT;
-import static org.apache.geode.management.internal.cli.commands.MiscellaneousCommands.ONLY_DATE_FORMAT;
-import static org.assertj.core.api.Assertions.assertThat;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.geode.cache.Cache;
-import org.apache.geode.distributed.ConfigurationProperties;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
-import org.apache.geode.internal.logging.LogService;
-import org.apache.geode.management.internal.cli.functions.ExportLogsFunction;
-import org.apache.geode.management.internal.cli.result.CommandResult;
-import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
-import org.apache.geode.management.internal.configuration.utils.ZipUtils;
-import org.apache.geode.test.dunit.IgnoredException;
-import org.apache.geode.test.dunit.rules.GfshShellConnectionRule;
-import org.apache.geode.test.dunit.rules.Locator;
-import org.apache.geode.test.dunit.rules.LocatorServerStartupRule;
-import org.apache.geode.test.dunit.rules.Member;
-import org.apache.geode.test.dunit.rules.Server;
-import org.apache.logging.log4j.Logger;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.Serializable;
-import java.nio.charset.Charset;
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Properties;
-import java.util.Set;
-import java.util.stream.Stream;
-
-
-public class ExportLogsDUnit {
-  private static final String ERROR_LOG_PREFIX = "[IGNORE]";
-
-  @Rule
-  public LocatorServerStartupRule lsRule = new LocatorServerStartupRule();
-
-  @Rule
-  public GfshShellConnectionRule gfshConnector = new GfshShellConnectionRule();
-
-  private Locator locator;
-  private Server server1;
-  private Server server2;
-
-  private Map<Member, List<LogLine>> expectedMessages;
-
-  @Before
-  public void setup() throws Exception {
-    Properties properties = new Properties();
-    properties.setProperty(ConfigurationProperties.LOG_LEVEL, "debug");
-
-    locator = lsRule.startLocatorVM(0, properties);
-    server1 = lsRule.startServerVM(1, properties, locator.getPort());
-    server2 = lsRule.startServerVM(2, properties, locator.getPort());
-
-    IgnoredException.addIgnoredException(ERROR_LOG_PREFIX);
-
-    expectedMessages = new HashMap<>();
-    expectedMessages.put(locator, listOfLogLines(locator, "info", "error", "debug"));
-    expectedMessages.put(server1, listOfLogLines(server1, "info", "error", "debug"));
-    expectedMessages.put(server2, listOfLogLines(server2, "info", "error", "debug"));
-
-    // log the messages in each of the members
-    for (Member member : expectedMessages.keySet()) {
-      List<LogLine> logLines = expectedMessages.get(member);
-
-      member.invoke(() -> {
-        Logger logger = LogService.getLogger();
-        logLines.forEach((LogLine logLine) -> logLine.writeLog(logger));
-      });
-    }
-
-    gfshConnector.connectAndVerify(locator);
-  }
-
-  @Test
-  public void startAndEndDateCanExcludeLogs() throws Exception {
-    ZonedDateTime now = LocalDateTime.now().atZone(ZoneId.systemDefault());
-    ZonedDateTime yesterday = now.minusDays(1);
-    ZonedDateTime twoDaysAgo = now.minusDays(2);
-
-    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(ONLY_DATE_FORMAT);
-
-    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
-    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(twoDaysAgo));
-    commandStringBuilder.addOption("end-time", dateTimeFormatter.format(yesterday));
-    commandStringBuilder.addOption("log-level", "debug");
-    commandStringBuilder.addOption("dir", "someDir");
-
-    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
-
-    Set<String> acceptedLogLevels = new HashSet<>();
-    verifyZipFileContents(acceptedLogLevels);
-  }
-
-  @Test
-  public void startAndEndDateCanIncludeLogs() throws Exception {
-    ZonedDateTime now = LocalDateTime.now().atZone(ZoneId.systemDefault());
-    ZonedDateTime yesterday = now.minusDays(1);
-    ZonedDateTime tomorrow = now.plusDays(1);
-
-    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(ONLY_DATE_FORMAT);
-
-    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
-    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(yesterday));
-    commandStringBuilder.addOption("end-time", dateTimeFormatter.format(tomorrow));
-    commandStringBuilder.addOption("log-level", "debug");
-    commandStringBuilder.addOption("dir", "someDir");
-
-    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
-
-    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
-    verifyZipFileContents(acceptedLogLevels);
-  }
-
-  @Test
-  public void testExportWithStartAndEndDateTimeFiltering() throws Exception {
-    ZonedDateTime cutoffTime = LocalDateTime.now().atZone(ZoneId.systemDefault());
-
-    String messageAfterCutoffTime =
-        "[this message should not show up since it is after cutoffTime]";
-    LogLine logLineAfterCutoffTime = new LogLine(messageAfterCutoffTime, "info", true);
-    server1.invoke(() -> {
-      Logger logger = LogService.getLogger();
-      logLineAfterCutoffTime.writeLog(logger);
-    });
-
-    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(FORMAT);
-    String cutoffTimeString = dateTimeFormatter.format(cutoffTime);
-
-    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
-    commandStringBuilder.addOption("start-time",
-        dateTimeFormatter.format(cutoffTime.minusHours(1)));
-    commandStringBuilder.addOption("end-time", cutoffTimeString);
-    commandStringBuilder.addOption("log-level", "debug");
-    commandStringBuilder.addOption("dir", "someDir");
-
-    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
-
-    expectedMessages.get(server1).add(logLineAfterCutoffTime);
-    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
-    verifyZipFileContents(acceptedLogLevels);
-  }
-
-  @Test
-  public void testExportWithThresholdLogLevelFilter() throws Exception {
-
-    CommandResult result = gfshConnector
-        .executeAndVerifyCommand("export logs --log-level=info --only-log-level=false --dir="
-            + lsRule.getTempFolder().getRoot().getCanonicalPath());
-
-    Set<String> acceptedLogLevels = Stream.of("info", "error").collect(toSet());
-    verifyZipFileContents(acceptedLogLevels);
-
-  }
-
-  @Test
-  public void testExportWithExactLogLevelFilter() throws Exception {
-    CommandResult result = gfshConnector
-        .executeAndVerifyCommand("export logs --log-level=info --only-log-level=true --dir="
-            + lsRule.getTempFolder().getRoot().getCanonicalPath());
-
-
-    Set<String> acceptedLogLevels = Stream.of("info").collect(toSet());
-    verifyZipFileContents(acceptedLogLevels);
-  }
-
-  @Test
-  public void testExportWithNoFilters() throws Exception {
-    CommandResult result = gfshConnector.executeAndVerifyCommand("export logs  --dir="
-        + "someDir" /* lsRule.getTempFolder().getRoot().getCanonicalPath() */);
-
-    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
-    verifyZipFileContents(acceptedLogLevels);
-
-    // Ensure export logs region gets cleaned up
-    server1.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
-    server2.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
-    locator.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
-  }
-
-  @Test
-  public void exportLogsRegionIsCleanedUpProperly() throws IOException, ClassNotFoundException {
-    locator.invoke(() -> {
-      ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
-      Cache cache = GemFireCacheImpl.getInstance();
-      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
-    });
-
-    server1.invoke(() -> {
-      ExportLogsFunction.createOrGetExistingExportLogsRegion(false);
-      Cache cache = GemFireCacheImpl.getInstance();
-      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
-    });
-
-    locator.invoke(() -> {
-      ExportLogsFunction.destroyExportLogsRegion();
-
-      Cache cache = GemFireCacheImpl.getInstance();
-      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
-    });
-
-    server1.invoke(() -> {
-      Cache cache = GemFireCacheImpl.getInstance();
-      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
-    });
-  }
-
-
-  public void verifyZipFileContents(Set<String> acceptedLogLevels) throws IOException {
-    File unzippedLogFileDir = unzipExportedLogs();
-
-    Set<File> dirsFromZipFile =
-        Stream.of(unzippedLogFileDir.listFiles()).filter(File::isDirectory).collect(toSet());
-    assertThat(dirsFromZipFile).hasSize(expectedMessages.keySet().size());
-
-    Set<String> expectedDirNames =
-        expectedMessages.keySet().stream().map(Member::getName).collect(toSet());
-    Set<String> actualDirNames = dirsFromZipFile.stream().map(File::getName).collect(toSet());
-    assertThat(actualDirNames).isEqualTo(expectedDirNames);
-
-    System.out.println("Unzipped artifacts:");
-    for (File dir : dirsFromZipFile) {
-      verifyLogFileContents(acceptedLogLevels, dir);
-    }
-  }
-
-  public void verifyLogFileContents(Set<String> acceptedLogLevels, File dirForMember)
-      throws IOException {
-
-    String memberName = dirForMember.getName();
-    Member member = expectedMessages.keySet().stream()
-        .filter((Member aMember) -> aMember.getName().equals(memberName)).findFirst().get();
-
-    assertThat(member).isNotNull();
-
-    Set<String> fileNamesInDir =
-        Stream.of(dirForMember.listFiles()).map(File::getName).collect(toSet());
-
-    System.out.println(dirForMember.getCanonicalPath() + " : " + fileNamesInDir);
-
-    File logFileForMember = new File(dirForMember, memberName + ".log");
-    assertThat(logFileForMember).exists();
-    assertThat(fileNamesInDir).hasSize(1);
-
-    String logFileContents = FileUtils.readLines(logFileForMember, Charset.defaultCharset())
-        .stream().collect(joining("\n"));
-
-    for (LogLine logLine : expectedMessages.get(member)) {
-      boolean shouldExpectLogLine =
-          acceptedLogLevels.contains(logLine.level) && !logLine.shouldBeIgnoredDueToTimestamp;
-
-      if (shouldExpectLogLine) {
-        assertThat(logFileContents).contains(logLine.getMessage());
-      } else {
-        assertThat(logFileContents).doesNotContain(logLine.getMessage());
-      }
-    }
-
-  }
-
-  private File unzipExportedLogs() throws IOException {
-    File locatorWorkingDir = locator.getWorkingDir();
-    List<File> filesInDir = Stream.of(locatorWorkingDir.listFiles()).collect(toList());
-    assertThat(filesInDir).isNotEmpty();
-
-
-    List<File> zipFilesInDir = Stream.of(locatorWorkingDir.listFiles())
-        .filter(f -> f.getName().endsWith(".zip")).collect(toList());
-    assertThat(zipFilesInDir)
-        .describedAs(filesInDir.stream().map(File::getAbsolutePath).collect(joining(",")))
-        .hasSize(1);
-
-    File unzippedLogFileDir = lsRule.getTempFolder().newFolder("unzippedLogs");
-    ZipUtils.unzip(zipFilesInDir.get(0).getCanonicalPath(), unzippedLogFileDir.getCanonicalPath());
-    return unzippedLogFileDir;
-  }
-
-  private List<LogLine> listOfLogLines(Member member, String... levels) {
-    return Stream.of(levels).map(level -> new LogLine(member, level)).collect(toList());
-  }
-
-  private static void verifyExportLogsRegionWasDestroyed() {
-    Cache cache = GemFireCacheImpl.getInstance();
-    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
-  }
-
-  public static class LogLine implements Serializable {
-    String level;
-    String message;
-    boolean shouldBeIgnoredDueToTimestamp;
-
-    public LogLine(String message, String level, boolean shouldBeIgnoredDueToTimestamp) {
-      this.message = message;
-      this.level = level;
-      this.shouldBeIgnoredDueToTimestamp = shouldBeIgnoredDueToTimestamp;
-    }
-
-    public LogLine(Member member, String level) {
-      this.level = level;
-      this.message = buildMessage(member.getName());
-    }
-
-    public String getMessage() {
-      return message;
-    }
-
-    private String buildMessage(String memberName) {
-      StringBuilder stringBuilder = new StringBuilder();
-      if (Objects.equals(level, "error")) {
-        stringBuilder.append(ERROR_LOG_PREFIX);
-      }
-      stringBuilder.append(level);
-
-      return stringBuilder.append(memberName).toString();
-    }
-
-
-    public void writeLog(Logger logger) {
-      switch (this.level) {
-        case "info":
-          logger.info(getMessage());
-          break;
-        case "error":
-          logger.error(getMessage());
-          break;
-        case "debug":
-          logger.debug(getMessage());
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnitTest.java
new file mode 100644
index 0000000..19198f2
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnitTest.java
@@ -0,0 +1,342 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.commands;
+
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toSet;
+import static org.apache.geode.management.internal.cli.commands.ExportLogCommand.FORMAT;
+import static org.apache.geode.management.internal.cli.commands.ExportLogCommand.ONLY_DATE_FORMAT;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.geode.cache.Cache;
+import org.apache.geode.distributed.ConfigurationProperties;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.management.internal.cli.functions.ExportLogsFunction;
+import org.apache.geode.management.internal.cli.result.CommandResult;
+import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
+import org.apache.geode.management.internal.configuration.utils.ZipUtils;
+import org.apache.geode.test.dunit.IgnoredException;
+import org.apache.geode.test.dunit.rules.GfshShellConnectionRule;
+import org.apache.geode.test.dunit.rules.Locator;
+import org.apache.geode.test.dunit.rules.LocatorServerStartupRule;
+import org.apache.geode.test.dunit.rules.Member;
+import org.apache.geode.test.dunit.rules.Server;
+import org.apache.geode.test.junit.categories.DistributedTest;
+import org.apache.logging.log4j.Logger;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.nio.charset.Charset;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Properties;
+import java.util.Set;
+import java.util.stream.Stream;
+
+@Category(DistributedTest.class)
+public class ExportLogsDUnitTest {
+  private static final String ERROR_LOG_PREFIX = "[IGNORE]";
+
+  @Rule
+  public LocatorServerStartupRule lsRule = new LocatorServerStartupRule();
+
+  @Rule
+  public GfshShellConnectionRule gfshConnector = new GfshShellConnectionRule();
+
+  private Locator locator;
+  private Server server1;
+  private Server server2;
+
+  private Map<Member, List<LogLine>> expectedMessages;
+
+  @Before
+  public void setup() throws Exception {
+    Properties properties = new Properties();
+    properties.setProperty(ConfigurationProperties.LOG_LEVEL, "debug");
+
+    locator = lsRule.startLocatorVM(0, properties);
+    server1 = lsRule.startServerVM(1, properties, locator.getPort());
+    server2 = lsRule.startServerVM(2, properties, locator.getPort());
+
+    IgnoredException.addIgnoredException(ERROR_LOG_PREFIX);
+
+    expectedMessages = new HashMap<>();
+    expectedMessages.put(locator, listOfLogLines(locator, "info", "error", "debug"));
+    expectedMessages.put(server1, listOfLogLines(server1, "info", "error", "debug"));
+    expectedMessages.put(server2, listOfLogLines(server2, "info", "error", "debug"));
+
+    // log the messages in each of the members
+    for (Member member : expectedMessages.keySet()) {
+      List<LogLine> logLines = expectedMessages.get(member);
+
+      member.invoke(() -> {
+        Logger logger = LogService.getLogger();
+        logLines.forEach((LogLine logLine) -> logLine.writeLog(logger));
+      });
+    }
+
+    gfshConnector.connectAndVerify(locator);
+  }
+
+  @Test
+  public void startAndEndDateCanIncludeLogs() throws Exception {
+    ZonedDateTime now = LocalDateTime.now().atZone(ZoneId.systemDefault());
+    ZonedDateTime yesterday = now.minusDays(1);
+    ZonedDateTime tomorrow = now.plusDays(1);
+
+    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(ONLY_DATE_FORMAT);
+
+    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
+    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(yesterday));
+    commandStringBuilder.addOption("end-time", dateTimeFormatter.format(tomorrow));
+    commandStringBuilder.addOption("log-level", "debug");
+    commandStringBuilder.addOption("dir", "someDir");
+
+    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
+
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
+  public void testExportWithStartAndEndDateTimeFiltering() throws Exception {
+    ZonedDateTime cutoffTime = LocalDateTime.now().atZone(ZoneId.systemDefault());
+
+    String messageAfterCutoffTime =
+        "[this message should not show up since it is after cutoffTime]";
+    LogLine logLineAfterCutoffTime = new LogLine(messageAfterCutoffTime, "info", true);
+    server1.invoke(() -> {
+      Logger logger = LogService.getLogger();
+      logLineAfterCutoffTime.writeLog(logger);
+    });
+
+    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(FORMAT);
+    String cutoffTimeString = dateTimeFormatter.format(cutoffTime);
+
+    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
+    commandStringBuilder.addOption("start-time",
+        dateTimeFormatter.format(cutoffTime.minusHours(1)));
+    commandStringBuilder.addOption("end-time", cutoffTimeString);
+    commandStringBuilder.addOption("log-level", "debug");
+    commandStringBuilder.addOption("dir", "someDir");
+
+    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
+
+    expectedMessages.get(server1).add(logLineAfterCutoffTime);
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
+  public void testExportWithThresholdLogLevelFilter() throws Exception {
+
+    CommandResult result = gfshConnector
+        .executeAndVerifyCommand("export logs --log-level=info --only-log-level=false");
+
+    Set<String> acceptedLogLevels = Stream.of("info", "error").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+
+  }
+
+  @Test
+  public void testExportWithExactLogLevelFilter() throws Exception {
+    CommandResult result =
+        gfshConnector.executeAndVerifyCommand("export logs --log-level=info --only-log-level=true");
+
+
+    Set<String> acceptedLogLevels = Stream.of("info").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
+  public void testExportWithNoFilters() throws Exception {
+    gfshConnector.executeAndVerifyCommand("export logs --log-level=all");
+
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+
+    // Ensure export logs region gets cleaned up
+    server1.invoke(ExportLogsDUnitTest::verifyExportLogsRegionWasDestroyed);
+    server2.invoke(ExportLogsDUnitTest::verifyExportLogsRegionWasDestroyed);
+    locator.invoke(ExportLogsDUnitTest::verifyExportLogsRegionWasDestroyed);
+  }
+
+  @Test
+  public void exportLogsRegionIsCleanedUpProperly() throws IOException, ClassNotFoundException {
+    locator.invoke(() -> {
+      GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+      ExportLogsFunction.createOrGetExistingExportLogsRegion(true, cache);
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
+    });
+
+    server1.invoke(() -> {
+      GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+      ExportLogsFunction.createOrGetExistingExportLogsRegion(false, cache);
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
+    });
+
+    locator.invoke(() -> {
+      GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+      ExportLogsFunction.destroyExportLogsRegion(cache);
+
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+    });
+
+    server1.invoke(() -> {
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+    });
+  }
+
+
+  public void verifyZipFileContents(Set<String> acceptedLogLevels) throws IOException {
+    File unzippedLogFileDir = unzipExportedLogs();
+
+    Set<File> dirsFromZipFile =
+        Stream.of(unzippedLogFileDir.listFiles()).filter(File::isDirectory).collect(toSet());
+    assertThat(dirsFromZipFile).hasSize(expectedMessages.keySet().size());
+
+    Set<String> expectedDirNames =
+        expectedMessages.keySet().stream().map(Member::getName).collect(toSet());
+    Set<String> actualDirNames = dirsFromZipFile.stream().map(File::getName).collect(toSet());
+    assertThat(actualDirNames).isEqualTo(expectedDirNames);
+
+    System.out.println("Unzipped artifacts:");
+    for (File dir : dirsFromZipFile) {
+      verifyLogFileContents(acceptedLogLevels, dir);
+    }
+  }
+
+  public void verifyLogFileContents(Set<String> acceptedLogLevels, File dirForMember)
+      throws IOException {
+
+    String memberName = dirForMember.getName();
+    Member member = expectedMessages.keySet().stream()
+        .filter((Member aMember) -> aMember.getName().equals(memberName)).findFirst().get();
+
+    assertThat(member).isNotNull();
+
+    Set<String> fileNamesInDir =
+        Stream.of(dirForMember.listFiles()).map(File::getName).collect(toSet());
+
+    System.out.println(dirForMember.getCanonicalPath() + " : " + fileNamesInDir);
+
+    File logFileForMember = new File(dirForMember, memberName + ".log");
+    assertThat(logFileForMember).exists();
+    assertThat(fileNamesInDir).hasSize(1);
+
+    String logFileContents = FileUtils.readLines(logFileForMember, Charset.defaultCharset())
+        .stream().collect(joining("\n"));
+
+    for (LogLine logLine : expectedMessages.get(member)) {
+      boolean shouldExpectLogLine =
+          acceptedLogLevels.contains(logLine.level) && !logLine.shouldBeIgnoredDueToTimestamp;
+
+      if (shouldExpectLogLine) {
+        assertThat(logFileContents).contains(logLine.getMessage());
+      } else {
+        assertThat(logFileContents).doesNotContain(logLine.getMessage());
+      }
+    }
+
+  }
+
+  private File unzipExportedLogs() throws IOException {
+    File locatorWorkingDir = locator.getWorkingDir();
+    List<File> filesInDir = Stream.of(locatorWorkingDir.listFiles()).collect(toList());
+    assertThat(filesInDir).isNotEmpty();
+
+
+    List<File> zipFilesInDir = Stream.of(locatorWorkingDir.listFiles())
+        .filter(f -> f.getName().endsWith(".zip")).collect(toList());
+    assertThat(zipFilesInDir)
+        .describedAs(filesInDir.stream().map(File::getAbsolutePath).collect(joining(",")))
+        .hasSize(1);
+
+    File unzippedLogFileDir = lsRule.getTempFolder().newFolder("unzippedLogs");
+    ZipUtils.unzip(zipFilesInDir.get(0).getCanonicalPath(), unzippedLogFileDir.getCanonicalPath());
+    return unzippedLogFileDir;
+  }
+
+  private List<LogLine> listOfLogLines(Member member, String... levels) {
+    return Stream.of(levels).map(level -> new LogLine(member, level)).collect(toList());
+  }
+
+  private static void verifyExportLogsRegionWasDestroyed() {
+    Cache cache = GemFireCacheImpl.getInstance();
+    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+  }
+
+  public static class LogLine implements Serializable {
+    String level;
+    String message;
+    boolean shouldBeIgnoredDueToTimestamp;
+
+    public LogLine(String message, String level, boolean shouldBeIgnoredDueToTimestamp) {
+      this.message = message;
+      this.level = level;
+      this.shouldBeIgnoredDueToTimestamp = shouldBeIgnoredDueToTimestamp;
+    }
+
+    public LogLine(Member member, String level) {
+      this.level = level;
+      this.message = buildMessage(member.getName());
+    }
+
+    public String getMessage() {
+      return message;
+    }
+
+    private String buildMessage(String memberName) {
+      StringBuilder stringBuilder = new StringBuilder();
+      if (Objects.equals(level, "error")) {
+        stringBuilder.append(ERROR_LOG_PREFIX + "-");
+      }
+      stringBuilder.append(level + "-");
+
+      return stringBuilder.append(memberName).toString();
+    }
+
+
+    public void writeLog(Logger logger) {
+      switch (this.level) {
+        case "info":
+          logger.info(getMessage());
+          break;
+        case "error":
+          logger.error(getMessage());
+          break;
+        case "debug":
+          logger.debug(getMessage());
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsInterceptorJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsInterceptorJUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsInterceptorJUnitTest.java
new file mode 100644
index 0000000..97ed686
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsInterceptorJUnitTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.cli.commands;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.when;
+
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.test.junit.categories.UnitTest;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+import java.util.HashMap;
+import java.util.Map;
+
+@Category(UnitTest.class)
+public class ExportLogsInterceptorJUnitTest {
+  private ExportLogCommand.ExportLogsInterceptor interceptor;
+  private GfshParseResult parseResult;
+  private Map<String, String> arguments;
+  private Result result;
+
+  @Before
+  public void before() {
+    interceptor = new ExportLogCommand.ExportLogsInterceptor();
+    parseResult = Mockito.mock(GfshParseResult.class);
+    arguments = new HashMap<>();
+    arguments.put("log-level", "info");
+    when(parseResult.getParamValueStrings()).thenReturn(arguments);
+  }
+
+  @Test
+  public void testGroupAndMember() {
+    arguments.put("group", "group");
+    arguments.put("member", "member");
+    result = interceptor.preExecution(parseResult);
+    assertThat(result.nextLine()).contains("Can't specify both group and member");
+  }
+
+  @Test
+  public void testLogLevel() {
+    arguments.put("log-level", "test");
+    result = interceptor.preExecution(parseResult);
+    assertThat(result.nextLine()).contains("Invalid log level: test");
+  }
+
+  @Test
+  public void testStartEnd() {
+    arguments.put("start-time", "2000/01/01");
+    arguments.put("end-time", "2000/01/02");
+    result = interceptor.preExecution(parseResult);
+    assertThat(result.nextLine()).isEmpty();
+
+    arguments.put("start-time", "2000/01/02");
+    arguments.put("end-time", "2000/01/01");
+    result = interceptor.preExecution(parseResult);
+    assertThat(result.nextLine()).contains("start-time has to be earlier than end-time");
+  }
+
+  @Test
+  public void testInclideStats() {
+    arguments.put("logs-only", "true");
+    arguments.put("stats-only", "false");
+    result = interceptor.preExecution(parseResult);
+    assertThat(result.nextLine()).isEmpty();
+
+    arguments.put("logs-only", "true");
+    arguments.put("stats-only", "true");
+    result = interceptor.preExecution(parseResult);
+    assertThat(result.nextLine()).contains("logs-only and stats-only can't both be true");
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportStatsDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportStatsDUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportStatsDUnitTest.java
new file mode 100644
index 0000000..f1ba7ef
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportStatsDUnitTest.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.cli.commands;
+
+import static org.apache.geode.distributed.ConfigurationProperties.HTTP_SERVICE_BIND_ADDRESS;
+import static org.apache.geode.distributed.ConfigurationProperties.HTTP_SERVICE_PORT;
+import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_PORT;
+import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_ARCHIVE_FILE;
+import static org.apache.geode.management.internal.cli.commands.ExportLogCommand.ONLY_DATE_FORMAT;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.google.common.collect.Sets;
+
+import org.apache.geode.distributed.ConfigurationProperties;
+import org.apache.geode.internal.AvailablePortHelper;
+import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
+import org.apache.geode.test.dunit.rules.GfshShellConnectionRule;
+import org.apache.geode.test.dunit.rules.Locator;
+import org.apache.geode.test.dunit.rules.LocatorServerStartupRule;
+import org.apache.geode.test.junit.categories.DistributedTest;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.HashSet;
+import java.util.Properties;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+
+@Category(DistributedTest.class)
+public class ExportStatsDUnitTest {
+  @ClassRule
+  public static LocatorServerStartupRule lsRule = new LocatorServerStartupRule();
+
+  @ClassRule
+  public static GfshShellConnectionRule connector = new GfshShellConnectionRule();
+
+  protected static int jmxPort, httpPort;
+  protected static Set<String> expectedZipEntries = new HashSet<>();
+  protected static Locator locator;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(2);
+    httpPort = ports[0];
+    jmxPort = ports[1];
+    Properties locatorProperties = new Properties();
+    locatorProperties.setProperty(HTTP_SERVICE_BIND_ADDRESS, "localhost");
+    locatorProperties.setProperty(HTTP_SERVICE_PORT, httpPort + "");
+    locatorProperties.setProperty(JMX_MANAGER_PORT, jmxPort + "");
+
+    // start the locator in vm0 and then connect to it over http
+    locator = lsRule.startLocatorVM(0, locatorProperties);
+
+    Properties serverProperties = new Properties();
+    serverProperties.setProperty(ConfigurationProperties.STATISTIC_SAMPLING_ENABLED, "true");
+    serverProperties.setProperty(STATISTIC_ARCHIVE_FILE, "statistics.gfs");
+    lsRule.startServerVM(1, serverProperties, locator.getPort());
+
+    expectedZipEntries = Sets.newHashSet("locator-0/locator-0.log", "server-1/server-1.log",
+        "server-1/statistics.gfs");
+  }
+
+  protected void connectIfNeeded() throws Exception {
+    if (!connector.isConnected()) {
+      connector.connect(locator);
+    }
+  }
+
+  @Test
+  public void testExportLogsAndStats() throws Exception {
+    connectIfNeeded();
+    connector.executeAndVerifyCommand("export logs");
+    String zipPath = getZipPathFromCommandResult(connector.getGfshOutput());
+    Set<String> actualZipEnries = getZipEntries(zipPath);
+
+    Set<String> expectedFiles = Sets.newHashSet("locator-0/locator-0.log", "server-1/server-1.log",
+        "server-1/statistics.gfs");
+    assertThat(actualZipEnries).isEqualTo(expectedFiles);
+  }
+
+  @Test
+  public void testExportLogsOnly() throws Exception {
+    connectIfNeeded();
+    connector.executeAndVerifyCommand("export logs --logs-only");
+    String zipPath = getZipPathFromCommandResult(connector.getGfshOutput());
+    Set<String> actualZipEnries = getZipEntries(zipPath);
+
+    Set<String> expectedFiles = Sets.newHashSet("locator-0/locator-0.log", "server-1/server-1.log");
+    assertThat(actualZipEnries).isEqualTo(expectedFiles);
+  }
+
+  @Test
+  public void testExportStatsOnly() throws Exception {
+    connectIfNeeded();
+    connector.executeAndVerifyCommand("export logs --stats-only");
+    String zipPath = getZipPathFromCommandResult(connector.getGfshOutput());
+    Set<String> actualZipEnries = getZipEntries(zipPath);
+
+    Set<String> expectedFiles = Sets.newHashSet("server-1/statistics.gfs");
+    assertThat(actualZipEnries).isEqualTo(expectedFiles);
+  }
+
+  @Test
+  public void startAndEndDateCanExcludeLogs() throws Exception {
+    connectIfNeeded();
+    ZonedDateTime now = LocalDateTime.now().atZone(ZoneId.systemDefault());
+    ZonedDateTime yesterday = now.minusDays(1);
+    ZonedDateTime twoDaysAgo = now.minusDays(2);
+
+    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(ONLY_DATE_FORMAT);
+
+    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
+    commandStringBuilder.addOption("start-time", dateTimeFormatter.format(twoDaysAgo));
+    commandStringBuilder.addOption("end-time", dateTimeFormatter.format(yesterday));
+    commandStringBuilder.addOption("log-level", "debug");
+
+    connector.executeAndVerifyCommand(commandStringBuilder.toString());
+    String zipPath = getZipPathFromCommandResult(connector.getGfshOutput());
+
+    Set<String> actualZipEnries = getZipEntries(zipPath);
+    assertThat(actualZipEnries).hasSize(0);
+  }
+
+  protected String getZipPathFromCommandResult(String message) {
+    return message.replaceAll("Logs exported to the connected member's file system: ", "").trim();
+  }
+
+  private static Set<String> getZipEntries(String zipFilePath) throws IOException {
+    return new ZipFile(zipFilePath).stream().map(ZipEntry::getName).collect(Collectors.toSet());
+  }
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
index bb083be..72c39dc 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
@@ -25,15 +25,14 @@ import org.apache.geode.cache.execute.FunctionContext;
 import org.apache.geode.cache.execute.ResultSender;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.internal.cache.execute.FunctionContextImpl;
+import org.apache.geode.test.dunit.rules.Server;
 import org.apache.geode.test.dunit.rules.ServerStarterRule;
 import org.apache.geode.test.junit.categories.IntegrationTest;
-import org.junit.After;
+import org.apache.logging.log4j.Level;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.contrib.java.lang.system.RestoreSystemProperties;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
 
 import java.io.File;
 import java.io.IOException;
@@ -42,28 +41,15 @@ import java.io.IOException;
 public class ExportLogsFunctionIntegrationTest {
 
   @Rule
-  public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
-  @Rule
-  public RestoreSystemProperties restoreSystemProperties = new RestoreSystemProperties();
-
-  @Rule
   public ServerStarterRule serverStarterRule = new ServerStarterRule();
 
+  private Server server;
   private File serverWorkingDir;
 
   @Before
   public void setup() throws Exception {
-    serverWorkingDir = temporaryFolder.newFolder("serverWorkingDir");
-    System.setProperty("user.dir", serverWorkingDir.getCanonicalPath());
-    // fix a ci pipeline glitch
-    System.clearProperty("user.home");
-    serverStarterRule.startServer();
-  }
-
-  @After
-  public void teardown() {
-    serverStarterRule.after();
+    server = serverStarterRule.startServer();
+    serverWorkingDir = server.getWorkingDir();
   }
 
   @Test
@@ -76,7 +62,15 @@ public class ExportLogsFunctionIntegrationTest {
     File notALogFile = new File(serverWorkingDir, "foo.txt");
     FileUtils.writeStringToFile(notALogFile, "some text");
 
-    ExportLogsFunction.Args args = new ExportLogsFunction.Args(null, null, "info", false);
+    verifyExportLogsFunctionDoesNotBlowUp();
+
+    Cache cache = GemFireCacheImpl.getInstance();
+    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isEmpty();
+  }
+
+  public static void verifyExportLogsFunctionDoesNotBlowUp() throws Throwable {
+    ExportLogsFunction.Args args =
+        new ExportLogsFunction.Args(null, null, "info", false, false, false);
 
     CapturingResultSender resultSender = new CapturingResultSender();
     FunctionContext context = new FunctionContextImpl("functionId", args, resultSender);
@@ -86,38 +80,45 @@ public class ExportLogsFunctionIntegrationTest {
     if (resultSender.getThrowable() != null) {
       throw resultSender.getThrowable();
     }
-
-    Cache cache = GemFireCacheImpl.getInstance();
-    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isEmpty();
   }
 
   @Test
   public void createOrGetExistingExportLogsRegionDoesNotBlowUp() throws Exception {
-    ExportLogsFunction.createOrGetExistingExportLogsRegion(false);
-
-    Cache cache = GemFireCacheImpl.getInstance();
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+    ExportLogsFunction.createOrGetExistingExportLogsRegion(false, cache);
     assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
   }
 
   @Test
   public void destroyExportLogsRegionWorksAsExpectedForInitiatingMember()
       throws IOException, ClassNotFoundException {
-    ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
-    Cache cache = GemFireCacheImpl.getInstance();
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+    ExportLogsFunction.createOrGetExistingExportLogsRegion(true, cache);
     assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
 
-    ExportLogsFunction.destroyExportLogsRegion();
+    ExportLogsFunction.destroyExportLogsRegion(cache);
     assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
   }
 
 
   @Test
   public void argsCorrectlyBuildALogLevelFilter() {
-    ExportLogsFunction.Args args = new ExportLogsFunction.Args(null, null, "info", false);
+    ExportLogsFunction.Args args =
+        new ExportLogsFunction.Args(null, null, "info", false, false, false);
+    assertThat(args.getLogLevel().toString()).isEqualTo("INFO");
+    assertThat(args.isThisLogLevelOnly()).isFalse();
+    assertThat(args.isIncludeLogs()).isTrue();
+    assertThat(args.isIncludeStats()).isTrue();
+  }
 
-    assertThat(args.getPermittedLogLevels()).contains("info");
-    assertThat(args.getPermittedLogLevels()).contains("error");
-    assertThat(args.getPermittedLogLevels()).doesNotContain("fine");
+  @Test
+  public void argsCorrectlyBuilt() {
+    ExportLogsFunction.Args args =
+        new ExportLogsFunction.Args(null, null, "error", true, true, false);
+    assertThat(args.getLogLevel()).isEqualTo(Level.ERROR);
+    assertThat(args.isThisLogLevelOnly()).isTrue();
+    assertThat(args.isIncludeLogs()).isTrue();
+    assertThat(args.isIncludeStats()).isFalse();
   }
 
   private static class CapturingResultSender implements ResultSender {

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriterUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriterUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriterUnitTest.java
new file mode 100644
index 0000000..27becc5
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriterUnitTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import org.apache.geode.cache.EntryEvent;
+import org.apache.geode.test.junit.categories.UnitTest;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(UnitTest.class)
+public class ExportLogsCacheWriterUnitTest {
+  private ExportLogsCacheWriter writer = new ExportLogsCacheWriter();
+
+  @Test
+  public void writerReturnNullIfNoWrite() throws Exception {
+    writer.startFile("server-1");
+    assertThat(writer.endFile()).isNull();
+  }
+
+  @Test
+  public void writerReturnsPathIfWritten() throws Exception {
+    writer.startFile("server-1");
+    EntryEvent event = mock(EntryEvent.class);
+    when(event.getNewValue()).thenReturn(new byte[] {});
+    writer.beforeCreate(event);
+    assertThat(writer.endFile()).isNotNull();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterIntegrationTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterIntegrationTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterIntegrationTest.java
new file mode 100644
index 0000000..0df00b0
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterIntegrationTest.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import static org.apache.geode.distributed.ConfigurationProperties.LOG_FILE;
+import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_ARCHIVE_FILE;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.google.common.collect.Sets;
+
+import org.apache.geode.management.internal.cli.functions.ExportLogsFunctionIntegrationTest;
+import org.apache.geode.test.dunit.rules.Server;
+import org.apache.geode.test.dunit.rules.ServerStarterRule;
+import org.apache.geode.test.junit.categories.IntegrationTest;
+import org.apache.logging.log4j.Level;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+
+@Category(IntegrationTest.class)
+public class LogExporterIntegrationTest {
+
+  @Rule
+  public ServerStarterRule serverStarterRule = new ServerStarterRule();
+
+  private LogExporter logExporter;
+
+  private Properties properties;
+
+  private LogFilter filter = new LogFilter(Level.INFO, null, null);
+
+  @Before
+  public void before() throws Exception {
+    properties = new Properties();
+  }
+
+  @Test
+  public void serverStartedWithWrongSuffix() throws Exception {
+    properties.setProperty(LOG_FILE, new File("test.txt").getAbsolutePath());
+    properties.setProperty(STATISTIC_ARCHIVE_FILE, "archive.archive");
+    Server server = serverStarterRule.startServer(properties);
+    File serverWorkingDir = server.getWorkingDir();
+
+    logExporter = new LogExporter(filter, new File(serverWorkingDir, "test.log"),
+        new File(serverWorkingDir, "stats.gfs"));
+    List<Path> logFiles = logExporter.findLogFiles(serverWorkingDir.toPath());
+    assertThat(logFiles).isEmpty();
+
+    List<Path> statsFiles = logExporter.findStatFiles(serverWorkingDir.toPath());
+    assertThat(statsFiles).isEmpty();
+  }
+
+  @Test
+  public void serverStartedWithCorrectSuffix() throws Exception {
+    // ("relative log file is problematic in the test environment")
+    properties.setProperty(LOG_FILE, new File("test.log").getAbsolutePath());
+    properties.setProperty(STATISTIC_ARCHIVE_FILE, "archive.gfs");
+    Server server = serverStarterRule.startServer(properties);
+    File serverWorkingDir = server.getWorkingDir();
+
+    logExporter = new LogExporter(filter, new File(serverWorkingDir, "test.log"),
+        new File(serverWorkingDir, "archive.gfs"));
+    List<Path> logFiles = logExporter.findLogFiles(serverWorkingDir.toPath());
+    assertThat(logFiles).hasSize(1);
+    assertThat(logFiles.get(0)).hasFileName("test.log");
+
+    List<Path> statsFiles = logExporter.findStatFiles(serverWorkingDir.toPath());
+    assertThat(statsFiles).hasSize(1);
+    assertThat(statsFiles.get(0)).hasFileName("archive.gfs");
+  }
+
+  @Test
+  @Ignore("fix .gz suffix")
+  public void serverStartedWithGZSuffix() throws Exception {
+    properties.setProperty(LOG_FILE, "test.log.gz");
+    properties.setProperty(STATISTIC_ARCHIVE_FILE, "archive.gfs.gz");
+    Server server = serverStarterRule.startServer(properties);
+    File serverWorkingDir = server.getWorkingDir();
+
+    logExporter = new LogExporter(filter, new File(serverWorkingDir, "test.log"),
+        new File(serverWorkingDir, "stats.gfs"));
+    List<Path> logFiles = logExporter.findLogFiles(serverWorkingDir.toPath());
+    assertThat(logFiles).hasSize(1);
+
+    List<Path> statsFiles = logExporter.findStatFiles(serverWorkingDir.toPath());
+    assertThat(statsFiles).hasSize(1);
+  }
+
+  @Test
+  public void testNoStatsFile() throws Throwable {
+    Path logsFile = Files.createTempFile("server", ".log");
+    properties.setProperty(LOG_FILE, logsFile.toString());
+    serverStarterRule.startServer(properties);
+
+    ExportLogsFunctionIntegrationTest.verifyExportLogsFunctionDoesNotBlowUp();
+  }
+
+  @Test
+  public void testWithRelativeStatsFile() throws Throwable {
+    Path logsFile = Files.createTempFile("server", ".log");
+    // Path statsFile = Files.createTempFile("stats", ".gfs");
+    properties.setProperty(LOG_FILE, logsFile.toString());
+    properties.setProperty(STATISTIC_ARCHIVE_FILE, "stats.gfs");
+    serverStarterRule.startServer(properties);
+
+    ExportLogsFunctionIntegrationTest.verifyExportLogsFunctionDoesNotBlowUp();
+  }
+
+  @Test
+  public void testWithRelativeLogsFile() throws Throwable {
+    Path statsFile = Files.createTempFile("stats", ".gfs");
+    properties.setProperty(LOG_FILE, "server.log");
+    properties.setProperty(STATISTIC_ARCHIVE_FILE, statsFile.toString());
+    Server server = serverStarterRule.startServer(properties);
+
+    ExportLogsFunctionIntegrationTest.verifyExportLogsFunctionDoesNotBlowUp();
+  }
+
+  @Test
+  public void testWithAbsoluteLogsStatsFile() throws Exception {
+    File logsDir = Files.createTempDirectory("logs").toFile();
+    File statsDir = Files.createTempDirectory("stats").toFile();
+
+    File logFile = new File(logsDir, "server.log");
+    File statsFile = new File(statsDir, "stats.gfs");
+
+    properties.setProperty(LOG_FILE, logFile.getAbsolutePath());
+    properties.setProperty(STATISTIC_ARCHIVE_FILE, statsFile.getAbsolutePath());
+
+    serverStarterRule.startServer(properties);
+
+    logExporter = new LogExporter(filter, logFile, statsFile);
+    Path exportedZip = logExporter.export();
+    Set<String> actualFiles = LogExporterTest.getZipEntries(exportedZip.toString());
+    Set<String> expectedFiles = Sets.newHashSet("server.log", "stats.gfs");
+
+    assertThat(actualFiles).isEqualTo(expectedFiles);
+
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterTest.java
index 4cb76d3..a387af3 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogExporterTest.java
@@ -31,12 +31,18 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TemporaryFolder;
 
+import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
 import java.nio.file.Path;
-import java.text.ParseException;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
 
 @Category(IntegrationTest.class)
 public class LogExporterTest {
@@ -44,31 +50,56 @@ public class LogExporterTest {
   public TemporaryFolder temporaryFolder = new TemporaryFolder();
 
   private LogExporter logExporter;
+  LogFilter logFilter;
+  private File workingDir;
 
   @Before
-  public void setup() throws ParseException {
-    LogFilter logFilter = mock(LogFilter.class);
+  public void setup() throws Exception {
+    logFilter = mock(LogFilter.class);
 
     when(logFilter.acceptsFile(any())).thenReturn(true);
     when(logFilter.acceptsLine(any())).thenReturn(LogFilter.LineFilterResult.LINE_ACCEPTED);
 
-    logExporter = new LogExporter(logFilter);
+    workingDir = temporaryFolder.newFolder("workingDir");
+    logExporter = new LogExporter(logFilter, new File(workingDir, "server.log"),
+        new File(workingDir, "stats.gfs"));
   }
 
+  @Test
+  public void exporterShouldReturnNullIfNoFile() throws Exception {
+    assertThat(logExporter.export()).isNull();
+  }
+
+  @Test
+  public void exporterShouldStillReturnFileIfNoAcceptableLogs() throws Exception {
+    File logFile1 = new File(workingDir, "server1.log");
+    FileUtils.writeStringToFile(logFile1, "some log for server1 \n some other log line");
+    when(logFilter.acceptsLine(any())).thenReturn(LogFilter.LineFilterResult.LINE_REJECTED);
+    Path exportedZip = logExporter.export();
+    assertThat(exportedZip).isNotNull();
+
+    File unzippedExportDir = temporaryFolder.newFolder("unzippedExport");
+    ZipUtils.unzip(exportedZip.toString(), unzippedExportDir.getCanonicalPath());
+    assertThat(unzippedExportDir.listFiles()).hasSize(1);
+
+    // check the exported file has no content
+    BufferedReader br =
+        new BufferedReader(new FileReader(new File(unzippedExportDir, "server1.log")));
+    assertThat(br.readLine()).isNull();
+  }
 
   @Test
   public void exportBuildsZipCorrectlyWithTwoLogFiles() throws Exception {
-    File serverWorkingDir = temporaryFolder.newFolder("serverWorkingDir");
-    File logFile1 = new File(serverWorkingDir, "server1.log");
+    File logFile1 = new File(workingDir, "server1.log");
     FileUtils.writeStringToFile(logFile1, "some log for server1 \n some other log line");
-    File logFile2 = new File(serverWorkingDir, "server2.log");
+    File logFile2 = new File(workingDir, "server2.log");
     FileUtils.writeStringToFile(logFile2, "some log for server2 \n some other log line");
 
-    File notALogFile = new File(serverWorkingDir, "foo.txt");
+    File notALogFile = new File(workingDir, "foo.txt");
     FileUtils.writeStringToFile(notALogFile, "some text");
 
 
-    Path zippedExport = logExporter.export(serverWorkingDir.toPath());
+    Path zippedExport = logExporter.export();
 
     File unzippedExportDir = temporaryFolder.newFolder("unzippedExport");
     ZipUtils.unzip(zippedExport.toString(), unzippedExportDir.getCanonicalPath());
@@ -83,7 +114,6 @@ public class LogExporterTest {
 
   @Test
   public void findLogFilesExcludesFilesWithIncorrectExtension() throws Exception {
-    File workingDir = temporaryFolder.newFolder("workingDir");
     File logFile = new File(workingDir, "server.log");
 
     FileUtils.writeStringToFile(logFile, "some log line");
@@ -95,4 +125,21 @@ public class LogExporterTest {
     assertThat(logExporter.findLogFiles(workingDir.toPath())).doesNotContain(notALogFile.toPath());
   }
 
+  @Test
+  public void findStatFiles() throws Exception {
+    File statFile = new File(workingDir, "server.gfs");
+
+    FileUtils.writeStringToFile(statFile, "some stat line");
+
+    File notALogFile = new File(workingDir, "foo.txt");
+    FileUtils.writeStringToFile(notALogFile, "some text");
+
+    assertThat(logExporter.findStatFiles(workingDir.toPath())).contains(statFile.toPath());
+    assertThat(logExporter.findStatFiles(workingDir.toPath())).doesNotContain(notALogFile.toPath());
+  }
+
+  public static Set<String> getZipEntries(String zipFilePath) throws IOException {
+    return new ZipFile(zipFilePath).stream().map(ZipEntry::getName).collect(Collectors.toSet());
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/geode/blob/bf788176/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogFilterTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogFilterTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogFilterTest.java
index ca94fb6..d29e4de 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogFilterTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/LogFilterTest.java
@@ -16,33 +16,59 @@
 
 package org.apache.geode.management.internal.cli.util;
 
-import static java.util.stream.Collectors.toSet;
 import static org.apache.geode.management.internal.cli.util.LogFilter.LineFilterResult.LINE_ACCEPTED;
 import static org.apache.geode.management.internal.cli.util.LogFilter.LineFilterResult.LINE_REJECTED;
 import static org.apache.geode.management.internal.cli.util.LogFilter.LineFilterResult.REMAINDER_OF_FILE_REJECTED;
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import org.apache.geode.test.junit.categories.UnitTest;
+import org.apache.logging.log4j.Level;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.io.File;
+import java.nio.file.FileSystem;
+import java.nio.file.Path;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+import java.nio.file.spi.FileSystemProvider;
 import java.time.LocalDateTime;
-import java.util.Set;
-import java.util.stream.Stream;
 
 @Category(UnitTest.class)
 public class LogFilterTest {
   @Test
   public void permittedLogLevelsCanFilterLines() throws Exception {
-    Set<String> permittedLogLevels = Stream.of("info", "finest").collect(toSet());
+    LogFilter logFilter = new LogFilter(Level.INFO, null, null);
 
-    LogFilter logFilter = new LogFilter(permittedLogLevels, null, null);
+    LocalDateTime now = LocalDateTime.now();
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, now)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.WARN, now)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.DEBUG, now)).isEqualTo(LINE_REJECTED);
+    assertThat(logFilter.acceptsLogEntry(Level.TRACE, now)).isEqualTo(LINE_REJECTED);
+  }
+
+  @Test
+  public void permittedOnlyLogLevels() throws Exception {
+    LogFilter logFilter = new LogFilter(Level.INFO, true, null, null);
+
+    LocalDateTime now = LocalDateTime.now();
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, now)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.WARN, now)).isEqualTo(LINE_REJECTED);
+    assertThat(logFilter.acceptsLogEntry(Level.DEBUG, now)).isEqualTo(LINE_REJECTED);
+    assertThat(logFilter.acceptsLogEntry(Level.TRACE, now)).isEqualTo(LINE_REJECTED);
+  }
+
+  @Test
+  public void permittedLogLevelsALL() throws Exception {
+    LogFilter logFilter = new LogFilter(Level.ALL, null, null);
 
     LocalDateTime now = LocalDateTime.now();
-    assertThat(logFilter.acceptsLogEntry("info", now)).isEqualTo(LINE_ACCEPTED);
-    assertThat(logFilter.acceptsLogEntry("finest", now)).isEqualTo(LINE_ACCEPTED);
-    assertThat(logFilter.acceptsLogEntry("fine", now)).isEqualTo(LINE_REJECTED);
-    assertThat(logFilter.acceptsLogEntry("error", now)).isEqualTo(LINE_REJECTED);
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, now)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.WARN, now)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.DEBUG, now)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.TRACE, now)).isEqualTo(LINE_ACCEPTED);
   }
 
 
@@ -50,13 +76,13 @@ public class LogFilterTest {
   public void startDateCanFilterLines() {
     LocalDateTime startDate = LocalDateTime.now().minusDays(2);
 
-    LogFilter logFilter = new LogFilter(null, startDate, null);
+    LogFilter logFilter = new LogFilter(Level.ALL, startDate, null);
 
-    assertThat(logFilter.acceptsLogEntry("info", LocalDateTime.now())).isEqualTo(LINE_ACCEPTED);
-    assertThat(logFilter.acceptsLogEntry("info", startDate)).isEqualTo(LINE_ACCEPTED);
-    assertThat(logFilter.acceptsLogEntry("fine", startDate)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, LocalDateTime.now())).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, startDate)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.DEBUG, startDate)).isEqualTo(LINE_ACCEPTED);
 
-    assertThat(logFilter.acceptsLogEntry("info", LocalDateTime.now().minusDays(3)))
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, LocalDateTime.now().minusDays(3)))
         .isEqualTo(LINE_REJECTED);
   }
 
@@ -64,15 +90,15 @@ public class LogFilterTest {
   public void endDateCanFilterLines() {
     LocalDateTime endDate = LocalDateTime.now().minusDays(2);
 
-    LogFilter logFilter = new LogFilter(null, null, endDate);
+    LogFilter logFilter = new LogFilter(Level.ALL, null, endDate);
 
-    assertThat(logFilter.acceptsLogEntry("info", LocalDateTime.now().minusDays(3)))
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, LocalDateTime.now().minusDays(3)))
         .isEqualTo(LINE_ACCEPTED);
 
-    assertThat(logFilter.acceptsLogEntry("info", endDate)).isEqualTo(LINE_ACCEPTED);
-    assertThat(logFilter.acceptsLogEntry("fine", endDate)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, endDate)).isEqualTo(LINE_ACCEPTED);
+    assertThat(logFilter.acceptsLogEntry(Level.DEBUG, endDate)).isEqualTo(LINE_ACCEPTED);
 
-    assertThat(logFilter.acceptsLogEntry("info", LocalDateTime.now()))
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, LocalDateTime.now()))
         .isEqualTo(REMAINDER_OF_FILE_REJECTED);
   }
 
@@ -81,46 +107,107 @@ public class LogFilterTest {
     LocalDateTime startDate = LocalDateTime.now().minusDays(5);
     LocalDateTime endDate = LocalDateTime.now().minusDays(2);
 
-    Set<String> permittedLogLevels = Stream.of("info", "finest").collect(toSet());
+    LogFilter logFilter = new LogFilter(Level.INFO, startDate, endDate);
 
-    LogFilter logFilter = new LogFilter(permittedLogLevels, startDate, endDate);
-
-
-    assertThat(logFilter.acceptsLogEntry("error", LocalDateTime.now().minusDays(6)))
+    assertThat(logFilter.acceptsLogEntry(Level.ERROR, LocalDateTime.now().minusDays(6)))
         .isEqualTo(LINE_REJECTED);
-    assertThat(logFilter.acceptsLogEntry("info", LocalDateTime.now().minusDays(6)))
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, LocalDateTime.now().minusDays(6)))
         .isEqualTo(LINE_REJECTED);
 
     assertThat(logFilter.acceptsLogEntry(null)).isEqualTo(LINE_REJECTED);
 
 
-    assertThat(logFilter.acceptsLogEntry("error", LocalDateTime.now().minusDays(4)))
+    assertThat(logFilter.acceptsLogEntry(Level.ERROR, LocalDateTime.now().minusDays(6)))
         .isEqualTo(LINE_REJECTED);
     assertThat(logFilter.acceptsLogEntry(null)).isEqualTo(LINE_REJECTED);
 
-    assertThat(logFilter.acceptsLogEntry("info", LocalDateTime.now().minusDays(4)))
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, LocalDateTime.now().minusDays(4)))
         .isEqualTo(LINE_ACCEPTED);
     assertThat(logFilter.acceptsLogEntry(null)).isEqualTo(LINE_ACCEPTED);
 
 
 
-    assertThat(logFilter.acceptsLogEntry("error", LocalDateTime.now().minusDays(1)))
+    assertThat(logFilter.acceptsLogEntry(Level.ERROR, LocalDateTime.now().minusDays(1)))
         .isEqualTo(REMAINDER_OF_FILE_REJECTED);
     assertThat(logFilter.acceptsLogEntry(null)).isEqualTo(REMAINDER_OF_FILE_REJECTED);
 
-    assertThat(logFilter.acceptsLogEntry("info", LocalDateTime.now().minusDays(1)))
+    assertThat(logFilter.acceptsLogEntry(Level.INFO, LocalDateTime.now().minusDays(1)))
         .isEqualTo(REMAINDER_OF_FILE_REJECTED);
   }
 
   @Test
   public void firstLinesAreAcceptedIfParsableLineHasNotBeenSeenYet() {
-    LogFilter logFilter = new LogFilter(Stream.of("info").collect(toSet()), null, null);
+    LogFilter logFilter = new LogFilter(Level.INFO, null, null);
 
     assertThat(logFilter.acceptsLogEntry(null)).isEqualTo(LINE_ACCEPTED);
     assertThat(logFilter.acceptsLogEntry(null)).isEqualTo(LINE_ACCEPTED);
 
-    assertThat(logFilter.acceptsLogEntry("error", LocalDateTime.now())).isEqualTo(LINE_REJECTED);
+    assertThat(logFilter.acceptsLogEntry(Level.DEBUG, LocalDateTime.now()))
+        .isEqualTo(LINE_REJECTED);
     assertThat(logFilter.acceptsLogEntry(null)).isEqualTo(LINE_REJECTED);
+  }
 
+  @Test
+  public void testAcceptFileWithCreateTimeNotAvailable() throws Exception {
+    Path path = mock(Path.class);
+    when(path.toFile()).thenReturn(mock(File.class));
+    when(path.toFile().lastModified()).thenReturn(System.currentTimeMillis());
+    when(path.getFileSystem()).thenThrow(SecurityException.class);
+
+    // a filter with no start/end date should accept this file
+    LogFilter filter = new LogFilter(Level.INFO, null, null);
+    assertThat(filter.acceptsFile(path)).isTrue();
+
+    // a filter with a start date of now should not accept the file
+    filter = new LogFilter(Level.INFO, LocalDateTime.now(), null);
+    assertThat(filter.acceptsFile(path)).isFalse();
+
+    // a filter with a start date of an hour ago should accept the file
+    filter = new LogFilter(Level.INFO, LocalDateTime.now().minusHours(1), null);
+    assertThat(filter.acceptsFile(path)).isTrue();
+
+    // a filter with an end date of now should accept the file
+    filter = new LogFilter(Level.INFO, null, LocalDateTime.now());
+    assertThat(filter.acceptsFile(path)).isTrue();
+
+    // a filter with an end date of an hour ago should also accept the file, because we only
+    // know the last modified time of the file; since we don't know when this file was created,
+    // it may still have been created more than an hour ago.
+    filter = new LogFilter(Level.INFO, null, LocalDateTime.now().minusHours(1));
+    assertThat(filter.acceptsFile(path)).isTrue();
+  }
+
+  @Test
+  public void testAcceptFileWithCreateTimeAvailable() throws Exception {
+    long now = System.currentTimeMillis();
+    Path path = mock(Path.class);
+    when(path.toFile()).thenReturn(mock(File.class));
+    when(path.toFile().lastModified()).thenReturn(System.currentTimeMillis());
+    BasicFileAttributes attributes = mock(BasicFileAttributes.class);
+    when(path.getFileSystem()).thenReturn(mock(FileSystem.class));
+    when(path.getFileSystem().provider()).thenReturn(mock(FileSystemProvider.class));
+    when(path.getFileSystem().provider().readAttributes(path, BasicFileAttributes.class))
+        .thenReturn(attributes);
+    when(attributes.creationTime()).thenReturn(FileTime.fromMillis(now - 10000));
+
+    // a filter with no start/end date should accept this file
+    LogFilter filter = new LogFilter(Level.INFO, null, null);
+    assertThat(filter.acceptsFile(path)).isTrue();
+
+    // a filter with a start date of now should not accept the file
+    filter = new LogFilter(Level.INFO, LocalDateTime.now(), null);
+    assertThat(filter.acceptsFile(path)).isFalse();
+
+    // a filter with a start date of an hour ago should accept the file
+    filter = new LogFilter(Level.INFO, LocalDateTime.now().minusHours(1), null);
+    assertThat(filter.acceptsFile(path)).isTrue();
+
+    // a filter with an end date of now should accept the file
+    filter = new LogFilter(Level.INFO, null, LocalDateTime.now());
+    assertThat(filter.acceptsFile(path)).isTrue();
+
+    // a filter with an end date of an hour ago should not accept the file
+    filter = new LogFilter(Level.INFO, null, LocalDateTime.now().minusHours(1));
+    assertThat(filter.acceptsFile(path)).isFalse();
   }
 }


Mime
View raw message