geode-commits mailing list archives

From jstew...@apache.org
Subject [2/2] geode git commit: Working on merging logs
Date Thu, 16 Feb 2017 19:42:46 GMT
Working on merging logs


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/eef1601d
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/eef1601d
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/eef1601d

Branch: refs/heads/feature/GEODE-2267
Commit: eef1601d123d23aa54af8872eaf84037b84c9ff6
Parents: f542a86
Author: Jared Stewart <jstewart@pivotal.io>
Authored: Wed Feb 15 09:13:47 2017 -0800
Committer: Jared Stewart <jstewart@pivotal.io>
Committed: Thu Feb 16 11:41:23 2017 -0800

----------------------------------------------------------------------
 .../cli/commands/MiscellaneousCommands.java     |  63 +-----
 .../cli/functions/ExportLogsFunction.java       |   2 +-
 .../internal/cli/util/ExportLogsRepository.java |  39 ++++
 .../management/internal/cli/util/MergeLogs.java |  89 ++++++--
 .../internal/configuration/utils/ZipUtils.java  |   5 +
 .../internal/cli/commands/ExportLogsDUnit.java  | 210 ++++++++++++++++---
 .../cli/functions/ExportLogsFunctionTest.java   |  15 +-
 .../internal/cli/util/MergeLogsTest.java        | 114 ++++++++++
 .../configuration/ZipUtilsJUnitTest.java        |  16 ++
 .../dunit/rules/GfshShellConnectionRule.java    |   4 +
 10 files changed, 452 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
index b8845e2..0e7ea0b 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
@@ -719,6 +719,8 @@ public class MiscellaneousCommands implements CommandMarker {
           unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
           help = CliStrings.EXPORT_LOGS__ENDTIME__HELP) String end) {
     Result result = null;
+    Logger logger = LogService.getLogger();
+
     try {
       GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
       Region region = ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
@@ -726,7 +728,6 @@ public class MiscellaneousCommands implements CommandMarker {
           (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();
 
       Set<DistributedMember> targetMembers = CliUtil.findMembersIncludingLocators(groups, memberIds);
-      Logger logger = LogService.getLogger();
 
       Map<String, Path> zipFilesFromMembers = new HashMap<>();
       for (DistributedMember server : targetMembers) {
@@ -750,14 +751,15 @@ public class MiscellaneousCommands implements CommandMarker {
         FileUtils.deleteQuietly(zipFile.toFile());
       }
 
-      Path exportedLogsZipFile = Paths.get("exportedLogs[" + System.currentTimeMillis() + "].zip");
+      Path workingDir = Paths.get(System.getProperty("user.dir"));
+      Path exportedLogsZipFile = workingDir.resolve("exportedLogs[" + System.currentTimeMillis() + "].zip").toAbsolutePath();
 
       logger.info("Zipping into: " + exportedLogsZipFile.toString());
       ZipUtils.zipDirectory(exportedLogsDir, exportedLogsZipFile);
       FileUtils.deleteDirectory(tempDir.toFile());
-
+      result = ResultBuilder.createInfoResult("File exported to: " + exportedLogsZipFile.toString());
     } catch (Exception ex) {
-      LogWrapper.getInstance().fine(ex.getMessage());
+      logger.error(ex, ex);
       result = ResultBuilder.createUserErrorResult(ex.getMessage());
     }
 
@@ -765,58 +767,7 @@ public class MiscellaneousCommands implements CommandMarker {
     return result;
   }
 
-  Result mergeLogs(List<String> logsToMerge) {
-    // create a new process for merging
-    LogWrapper.getInstance().fine("Exporting logs merging logs" + logsToMerge.size());
-    if (logsToMerge.size() > 1) {
-      List<String> commandList = new ArrayList<String>();
-      commandList.add(System.getProperty("java.home") + File.separatorChar + "bin"
-          + File.separatorChar + "java");
-      commandList.add("-classpath");
-      commandList.add(System.getProperty("java.class.path", "."));
-      commandList.add(MergeLogs.class.getName());
-
-      commandList
-          .add(logsToMerge.get(0).substring(0, logsToMerge.get(0).lastIndexOf(File.separator) + 1));
-
-      ProcessBuilder procBuilder = new ProcessBuilder(commandList);
-      StringBuilder output = new StringBuilder();
-      String errorString = new String();
-      try {
-        LogWrapper.getInstance().fine("Exporting logs now merging logs");
-        Process mergeProcess = procBuilder.redirectErrorStream(true).start();
-
-        mergeProcess.waitFor();
-
-        InputStream inputStream = mergeProcess.getInputStream();
-        BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
-        String line = null;
-
-        while ((line = br.readLine()) != null) {
-          output.append(line).append(GfshParser.LINE_SEPARATOR);
-        }
-        mergeProcess.destroy();
-      } catch (Exception e) {
-        LogWrapper.getInstance().fine(e.getMessage());
-        return ResultBuilder.createUserErrorResult(
-            CliStrings.EXPORT_LOGS__MSG__FUNCTION_EXCEPTION + "Could not merge");
-      } finally {
-        if (errorString != null) {
-          output.append(errorString).append(GfshParser.LINE_SEPARATOR);
-          LogWrapper.getInstance().fine("Exporting logs after merging logs " + output);
-        }
-      }
-      if (output.toString().contains("Sucessfully merged logs")) {
-        LogWrapper.getInstance().fine("Exporting logs Sucessfully merged logs");
-        return ResultBuilder.createInfoResult("Successfully merged");
-      } else {
-        LogWrapper.getInstance().fine("Could not merge");
-        return ResultBuilder.createUserErrorResult(
-            CliStrings.EXPORT_LOGS__MSG__FUNCTION_EXCEPTION + "Could not merge");
-      }
-    }
-    return ResultBuilder.createInfoResult("Only one log file, nothing to merge");
-  }
+
 
   /****
    * Current implementation supports writing it to a file and returning the location of the file
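
For reference, the reworked command is exercised from gfsh in ExportLogsDUnit further down; the
--dir value below is a placeholder:

    export logs --dir=/path/to/output
    export logs --log-level=info --only-log-level=false --dir=/path/to/output

In this revision the zip is written into the working directory (user.dir) of the member running
the command as exportedLogs[<timestamp>].zip, and its location is returned as an info result.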

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
index 7d196e6..220240a 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
@@ -160,7 +160,7 @@ public class ExportLogsFunction implements Function, InternalEntity {
         int logLevelCode = LogWriterImpl.levelNameToCode(level);
         int logLevelCodeThreshold = LogWriterImpl.levelNameToCode(logLevel);
 
-        return logLevelCode <= logLevelCodeThreshold;
+        return logLevelCode >= logLevelCodeThreshold;
       }).collect(toSet());
     }
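
The flipped comparison keeps log lines at or above the requested threshold instead of below it,
which matches the new ExportLogsFunctionTest (threshold "info" keeps "info" and "error" but drops
"fine"). A small self-contained sketch of that behaviour, using assumed level codes ordered by
severity (the real values come from LogWriterImpl.levelNameToCode):

    public class LogLevelFilterSketch {
      public static void main(String[] args) {
        int fine = 500, info = 800, error = 950;  // assumed codes: fine < info < error
        int threshold = info;                     // e.g. export logs --log-level=info
        System.out.println("keep fine?  " + (fine >= threshold));   // false
        System.out.println("keep info?  " + (info >= threshold));   // true
        System.out.println("keep error? " + (error >= threshold));  // true
      }
    }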
 

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java
new file mode 100644
index 0000000..9a79fc0
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsRepository.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import java.util.Map;
+
+public class ExportLogsRepository {
+
+
+  Map exportFiles;
+
+  public void addFile() {
+
+  }
+
+  public void deleteFile() {
+
+  }
+
+  private void cleanUpExpiredFiles() {
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
index 8d2ef45..67d5473 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/MergeLogs.java
@@ -14,17 +14,30 @@
  */
 package org.apache.geode.management.internal.cli.util;
 
+import static java.util.stream.Collectors.toList;
+
+import java.io.BufferedReader;
 import java.io.File;
 
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FilenameFilter;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.io.PrintWriter;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.geode.internal.logging.MergeLogFiles;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.GfshParser;
+import org.apache.geode.management.internal.cli.LogWrapper;
 import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.result.ResultBuilder;
 
 /**
  * 
@@ -32,17 +45,54 @@ import org.apache.geode.management.internal.cli.i18n.CliStrings;
  */
 
 public class MergeLogs {
-  /**
-   * @param args
-   */
+
+  public static void mergeLogsInNewProcess(Path logDirectory) {
+    // create a new process for merging
+    LogWrapper.getInstance().fine("Exporting logs merging logs" + logDirectory);
+    List<String> commandList = new ArrayList<String>();
+    commandList.add(System.getProperty("java.home") + File.separatorChar + "bin"
+        + File.separatorChar + "java");
+    commandList.add("-classpath");
+    commandList.add(System.getProperty("java.class.path", "."));
+    commandList.add(MergeLogs.class.getName());
+
+    commandList
+        .add(logDirectory.toAbsolutePath().toString());
+
+    ProcessBuilder procBuilder = new ProcessBuilder(commandList);
+    StringBuilder output = new StringBuilder();
+    String errorString = new String();
+    try {
+      LogWrapper.getInstance().fine("Exporting logs now merging logs");
+      Process mergeProcess = procBuilder.redirectErrorStream(true).start();
+
+      mergeProcess.waitFor();
+
+      InputStream inputStream = mergeProcess.getInputStream();
+      BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
+      String line = null;
+
+      while ((line = br.readLine()) != null) {
+        output.append(line).append(GfshParser.LINE_SEPARATOR);
+      }
+      mergeProcess.destroy();
+    } catch (Exception e) {
+      LogWrapper.getInstance().severe(e.getMessage());
+    }
+    if (output.toString().contains("Merged logs to: ")) {
+      LogWrapper.getInstance().fine("Exporting logs Sucessfully merged logs");
+    } else {
+      LogWrapper.getInstance().severe("Could not merge");
+    }
+  }
 
   public static void main(String[] args) {
     if (args.length < 1 || args.length > 1) {
       throw new IllegalArgumentException("Requires only 1  arguments : <targetDirName>");
     }
     try {
-      String result = mergeLogFile(args[0]);
-      System.out.println(result);
+      String result = mergeLogFile(args[0]).getCanonicalPath();
+      System.out.println("Merged logs to: " + result);
     } catch (Exception e) {
       System.out.println(e.getMessage());
     }
@@ -50,27 +100,32 @@ public class MergeLogs {
 
   }
 
-  static String mergeLogFile(String dirName) throws Exception {
-    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
-    File dir = new File(dirName);
-    String[] logsToMerge = dir.list();
-    InputStream[] logFiles = new FileInputStream[logsToMerge.length];
+  protected static List<File> findLogFilesToMerge (File dir) {
+    return FileUtils.listFiles(dir, new String[]{"log"}, true).stream().collect(toList());
+  }
+
+  static File mergeLogFile(String dirName) throws Exception {
+    Path dir = Paths.get(dirName);
+    List<File> logsToMerge = findLogFilesToMerge(dir.toFile());
+    InputStream[] logFiles = new FileInputStream[logsToMerge.size()];
     String[] logFileNames = new String[logFiles.length];
-    for (int i = 0; i < logsToMerge.length; i++) {
+    for (int i = 0; i < logsToMerge.size(); i++) {
       try {
-        logFiles[i] = new FileInputStream(dirName + File.separator + logsToMerge[i]);
-        logFileNames[i] = dirName + File.separator + logsToMerge[i];
+        logFiles[i] = new FileInputStream(logsToMerge.get(i));
+        logFileNames[i] = dir.relativize(logsToMerge.get(i).toPath()).toString();
       } catch (FileNotFoundException e) {
         throw new Exception(
-            logsToMerge[i] + " " + CliStrings.EXPORT_LOGS__MSG__FILE_DOES_NOT_EXIST);
+            logsToMerge.get(i) + " " + CliStrings.EXPORT_LOGS__MSG__FILE_DOES_NOT_EXIST);
       }
     }
 
     PrintWriter mergedLog = null;
+    File mergedLogFile = null;
     try {
       String mergeLog =
-          dirName + File.separator + "merge_" + sdf.format(new java.util.Date()) + ".log";
-      mergedLog = new PrintWriter(mergeLog);
+          dirName + File.separator + "merge_" + new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss").format(new java.util.Date()) + ".log";
+      mergedLogFile = new File(mergeLog);
+      mergedLog = new PrintWriter(mergedLogFile);
       boolean flag = MergeLogFiles.mergeLogFiles(logFiles, logFileNames, mergedLog);
     } catch (FileNotFoundException e) {
       throw new Exception(
@@ -79,7 +134,7 @@ public class MergeLogs {
       throw new Exception("Exception in creating PrintWriter in MergeLogFiles" + e.getMessage());
     }
 
-    return "Sucessfully merged logs";
+    return mergedLogFile;
   }
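
For reference, a rough sketch of how the reworked MergeLogs entry points are meant to be driven
(the directory path is a placeholder; both methods are defined in this hunk and exercised by
MergeLogsTest further down):

    // mergeLogFile(..) throws Exception and scans the directory recursively for *.log files.
    Path logDir = Paths.get("/tmp/exported-logs");             // placeholder
    File merged = MergeLogs.mergeLogFile(logDir.toString());   // in-process; returns merge_<timestamp>.log
    MergeLogs.mergeLogsInNewProcess(logDir);                   // forks a JVM that runs MergeLogs.main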
 
 

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
index 6c546a7..e32803a 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/utils/ZipUtils.java
@@ -41,6 +41,11 @@ import org.apache.commons.io.IOUtils;
 public class ZipUtils {
 
   public static void zipDirectory(Path sourceDirectory, Path targetFile) throws IOException {
+    Path parentDir = targetFile.getParent();
+    if (parentDir != null && !parentDir.toFile().exists()) {
+      parentDir.toFile().mkdirs();
+    }
+
     try (ZipOutputStream zs = new ZipOutputStream(Files.newOutputStream(targetFile))) {
       Files.walk(sourceDirectory).filter(path -> !Files.isDirectory(path)).forEach(path -> {
         ZipEntry zipEntry = new ZipEntry(sourceDirectory.relativize(path).toString());
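
With the added guard, zipping to a target whose parent directory does not exist yet should now
work, roughly as below (paths are placeholders; the behaviour is covered by the new
zipUtilsCanCreateParentDirsIfNecessary test):

    Path source = Paths.get("/tmp/exported-logs");            // placeholder directory to zip
    Path target = Paths.get("/tmp/not-yet-created/out.zip");  // parent folder is missing
    ZipUtils.zipDirectory(source, target);                    // throws IOException; parent created via mkdirs()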

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
index 5258891..566a918 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
@@ -16,16 +16,19 @@
 
 package org.apache.geode.management.internal.cli.commands;
 
+import static java.util.stream.Collectors.joining;
 import static java.util.stream.Collectors.toList;
 import static java.util.stream.Collectors.toSet;
 import static org.assertj.core.api.Assertions.assertThat;
 
+import org.apache.commons.collections.map.HashedMap;
 import org.apache.commons.io.FileUtils;
 import org.apache.geode.cache.AttributesFactory;
 import org.apache.geode.cache.DataPolicy;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.RegionShortcut;
 import org.apache.geode.cache.Scope;
+import org.apache.geode.distributed.ConfigurationProperties;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.internal.cache.InternalRegionArguments;
 import org.apache.geode.internal.logging.LogService;
@@ -34,25 +37,39 @@ import org.apache.geode.management.internal.cli.result.CommandResult;
 import org.apache.geode.management.internal.configuration.EventTestCacheWriter;
 import org.apache.geode.management.internal.configuration.domain.Configuration;
 import org.apache.geode.management.internal.configuration.utils.ZipUtils;
+import org.apache.geode.test.dunit.IgnoredException;
 import org.apache.geode.test.dunit.rules.GfshShellConnectionRule;
 import org.apache.geode.test.dunit.rules.Locator;
 import org.apache.geode.test.dunit.rules.LocatorServerStartupRule;
+import org.apache.geode.test.dunit.rules.Member;
 import org.apache.geode.test.dunit.rules.Server;
+import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.core.Appender;
+import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.Serializable;
 import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Properties;
 import java.util.Set;
+import java.util.UUID;
 import java.util.stream.Stream;
 
 
 public class ExportLogsDUnit {
+
+  private static final String ERROR_LOG_PREFIX = "[IGNORE]";
+
   @Rule
   public LocatorServerStartupRule lsRule = new LocatorServerStartupRule();
 
@@ -60,51 +77,77 @@ public class ExportLogsDUnit {
   public GfshShellConnectionRule gfshConnector = new GfshShellConnectionRule();
 
   private Locator locator;
+  private Server server1;
+  private Server server2;
 
-  @Test
-  public void testExport() throws Exception {
-    locator = lsRule.startLocatorVM(0);
+  Map<Member, List<LogLine>> expectedMessages;
+
+  @Before
+  public void setup() throws Exception {
+    Properties properties = new Properties();
+    properties.setProperty(ConfigurationProperties.LOG_LEVEL, "debug");
+
+    locator = lsRule.startLocatorVM(0, properties);
+    server1 = lsRule.startServerVM(1, properties, locator.getPort());
+    server2 = lsRule.startServerVM(2, properties, locator.getPort());
 
-    Server server = lsRule.startServerVM(1, locator.getPort());
-    Server server2 = lsRule.startServerVM(2, locator.getPort());
+    IgnoredException.addIgnoredException(ERROR_LOG_PREFIX);
+
+    expectedMessages = new HashMap<>();
+    expectedMessages.put(locator, listOfLogLines(locator.getName(), "info", "error", "debug"));
+    expectedMessages.put(server1, listOfLogLines(server1.getName(), "info", "error", "debug"));
+    expectedMessages.put(server2, listOfLogLines(server2.getName(), "info", "error", "debug"));
+
+    // log the messages in each of the members
+    for (Member member : expectedMessages.keySet()) {
+      List<LogLine> logLines = expectedMessages.get(member);
+
+      member.invoke(() -> {
+        Logger logger = LogService.getLogger();
+        logLines.forEach((LogLine logLine) -> logLine.writeLog(logger));
+      });
+    }
 
     gfshConnector.connectAndVerify(locator);
+  }
+
+  @Test
+  public void testExportWithThresholdLogLevelFilter() throws Exception {
 
     CommandResult result = gfshConnector.executeAndVerifyCommand(
-        "export logs  --dir=" + lsRule.getTempFolder().getRoot().getCanonicalPath());
+        "export logs --log-level=info --only-log-level=false --dir=" + lsRule.getTempFolder()
+            .getRoot().getCanonicalPath());
 
-    File locatorWorkingDir = locator.getWorkingDir();
-    List<File> zipFilesInDir = Stream.of(locatorWorkingDir.listFiles())
-        .filter(f -> f.getName().endsWith(".zip")).collect(toList());
+    File unzippedLogFileDir = unzipExportedLogs();
+    Set<String> acceptedLogLevels = Stream.of("info", "error").collect(toSet());
+    verifyZipFileContents(unzippedLogFileDir, acceptedLogLevels);
 
-    assertThat(zipFilesInDir).hasSize(1);
+  }
 
-    File unzippedLogFileDir = lsRule.getTempFolder().newFolder("unzippedLogs");
-    ZipUtils.unzip(zipFilesInDir.get(0).getCanonicalPath(), unzippedLogFileDir.getCanonicalPath());
 
-    Set<File> actualDirs =
-        Stream.of(unzippedLogFileDir.listFiles()).filter(File::isDirectory).collect(toSet());
+  @Test
+  public void testExportWithExactLogLevelFilter() throws Exception {
+    CommandResult result = gfshConnector.executeAndVerifyCommand(
+        "export logs --log-level=info --only-log-level=true --dir=" + lsRule.getTempFolder()
+            .getRoot().getCanonicalPath());
 
-    assertThat(actualDirs).hasSize(2);
+    File unzippedLogFileDir = unzipExportedLogs();
 
-    Set<String> expectedDirNames = Stream.of(server.getName(), server2.getName()).collect(toSet());
-    Set<String> actualDirNames = actualDirs.stream().map(File::getName).collect(toSet());
+    Set<String> acceptedLogLevels = Stream.of("info").collect(toSet());
+    verifyZipFileContents(unzippedLogFileDir, acceptedLogLevels);
+  }
 
-    assertThat(actualDirNames).isEqualTo(expectedDirNames);
+  @Test
+  public void testExportWithNoFilters() throws Exception {
+    CommandResult result = gfshConnector.executeAndVerifyCommand(
+        "export logs  --dir=" + "someDir" /*  lsRule.getTempFolder().getRoot().getCanonicalPath() */);
 
-    System.out.println("Unzipped artifacts:");
-    for (File dir : actualDirs) {
-      Set<String> fileNamesInDir = Stream.of(dir.listFiles()).map(File::getName).collect(toSet());
-
-      System.out.println(dir.getCanonicalPath() + " : " + fileNamesInDir);
-      assertThat(fileNamesInDir).contains(dir.getName() + ".log");
-      assertThat(fileNamesInDir).hasSize(1);
-      // TODO: Verify contents of files. (Write tests for logs containing multiple log levels,
-      // where some lines get through a filter and some do not
-    }
+    File unzippedLogFileDir = unzipExportedLogs();
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(unzippedLogFileDir, acceptedLogLevels);
 
     // Ensure export logs region does not accumulate data
-    server.invoke(() -> {
+    server1.invoke(() -> {
       Region exportLogsRegion = ExportLogsFunction.createOrGetExistingExportLogsRegion(false);
       assertThat(exportLogsRegion.size()).isEqualTo(0);
     });
@@ -118,5 +161,114 @@ public class ExportLogsDUnit {
     });
   }
 
+  public void verifyZipFileContents(File unzippedLogFileDir, Set<String> acceptedLogLevels)
+      throws IOException {
+    Set<File> dirsFromZipFile =
+        Stream.of(unzippedLogFileDir.listFiles()).filter(File::isDirectory).collect(toSet());
+    assertThat(dirsFromZipFile).hasSize(expectedMessages.keySet().size());
+
+    Set<String> expectedDirNames =
+        expectedMessages.keySet().stream().map(Member::getName).collect(toSet());
+    Set<String> actualDirNames = dirsFromZipFile.stream().map(File::getName).collect(toSet());
+    assertThat(actualDirNames).isEqualTo(expectedDirNames);
+
+    System.out.println("Unzipped artifacts:");
+    for (File dir : dirsFromZipFile) {
+      verifyLogFileContents(acceptedLogLevels, dir);
+    }
+  }
+
+  public void verifyLogFileContents(Set<String> acceptedLogLevels, File dirForMember)
+      throws IOException {
 
+    String memberName = dirForMember.getName();
+    Member member = expectedMessages.keySet().stream()
+        .filter((Member aMember) -> aMember.getName().equals(memberName))
+        .findFirst()
+        .get();
+
+    assertThat(member).isNotNull();
+
+    Set<String> fileNamesInDir =
+        Stream.of(dirForMember.listFiles()).map(File::getName).collect(toSet());
+
+    System.out.println(dirForMember.getCanonicalPath() + " : " + fileNamesInDir);
+
+    File logFileForMember = new File(dirForMember, memberName + ".log");
+    assertThat(logFileForMember).exists();
+    assertThat(fileNamesInDir).hasSize(1);
+
+    String logFileContents =
+        FileUtils.readLines(logFileForMember, Charset.defaultCharset()).stream()
+            .collect(joining("\n"));
+
+    for (LogLine logLine : expectedMessages.get(member)) {
+      boolean shouldExpectLogLine = acceptedLogLevels.contains(logLine.level);
+
+      if (shouldExpectLogLine) {
+        assertThat(logFileContents).contains(logLine.getMessage());
+      } else {
+        assertThat(logFileContents).doesNotContain(logLine.getMessage());
+      }
+    }
+
+  }
+
+  private File unzipExportedLogs() throws IOException {
+    File locatorWorkingDir = locator.getWorkingDir();
+    List<File> filesInDir = Stream.of(locatorWorkingDir.listFiles()).collect(toList());
+    assertThat(filesInDir).isNotEmpty();
+
+
+    List<File> zipFilesInDir = Stream.of(locatorWorkingDir.listFiles())
+        .filter(f -> f.getName().endsWith(".zip")).collect(toList());
+    assertThat(zipFilesInDir).describedAs(filesInDir.stream().map(File::getAbsolutePath).collect(joining(","))).hasSize(1);
+
+    File unzippedLogFileDir = lsRule.getTempFolder().newFolder("unzippedLogs");
+    ZipUtils.unzip(zipFilesInDir.get(0).getCanonicalPath(), unzippedLogFileDir.getCanonicalPath());
+    return unzippedLogFileDir;
+  }
+
+  private List<LogLine> listOfLogLines(String memberName, String... levels) {
+    return Stream.of(levels).map(level -> new LogLine(level, memberName)).collect(toList());
+  }
+
+
+  public static class LogLine implements Serializable {
+    String level;
+    String message;
+
+    public LogLine(String level, String memberName) {
+      this.level = level;
+      this.message = buildMessage(memberName);
+    }
+
+    public String getMessage() {
+      return message;
+    }
+
+    private String buildMessage(String memberName) {
+      StringBuilder stringBuilder = new StringBuilder();
+      if (Objects.equals(level, "error")) {
+        stringBuilder.append(ERROR_LOG_PREFIX);
+      }
+      stringBuilder.append(level);
+
+      return stringBuilder.append(memberName).toString();
+    }
+
+
+    public void writeLog(Logger logger) {
+      switch (this.level) {
+        case "info":
+          logger.info(getMessage());
+          break;
+        case "error":
+          logger.error(getMessage());
+          break;
+        case "debug":
+          logger.debug(getMessage());
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionTest.java
index 023c0a2..144dffd 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionTest.java
@@ -46,7 +46,9 @@ public class ExportLogsFunctionTest {
   @Rule
   public RestoreSystemProperties restoreSystemProperties = new RestoreSystemProperties();
 
-  private ServerStarterRule serverStarterRule;
+  @Rule
+  public ServerStarterRule serverStarterRule = new ServerStarterRule();
+
   private File serverWorkingDir;
 
   @Before
@@ -54,7 +56,6 @@ public class ExportLogsFunctionTest {
     serverWorkingDir = temporaryFolder.newFolder("serverWorkingDir");
     System.setProperty("user.dir", serverWorkingDir.getCanonicalPath());
 
-    serverStarterRule = new ServerStarterRule(new Properties());
     serverStarterRule.startServer();
   }
 
@@ -93,6 +94,16 @@ public class ExportLogsFunctionTest {
     assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
   }
 
+
+  @Test
+  public void argsCorrectlyBuildsLogLevelFilter() {
+    ExportLogsFunction.Args args = new ExportLogsFunction.Args(null, null, "info", false);
+
+    assertThat(args.getPermittedLogLevels()).contains("info");
+    assertThat(args.getPermittedLogLevels()).contains("error");
+    assertThat(args.getPermittedLogLevels()).doesNotContain("fine");
+  }
+
   private static class CapturingResultSender implements ResultSender {
     private Throwable t;
 

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/MergeLogsTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/MergeLogsTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/MergeLogsTest.java
new file mode 100644
index 0000000..7f4ffd4
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/util/MergeLogsTest.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.geode.management.internal.cli.util;
+
+import static java.util.stream.Collectors.joining;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.geode.distributed.internal.DistributionConfig;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.test.dunit.rules.Locator;
+import org.apache.geode.test.dunit.rules.LocatorServerStartupRule;
+import org.apache.geode.test.dunit.rules.Server;
+import org.apache.geode.test.junit.categories.DistributedTest;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.Properties;
+
+@Category(DistributedTest.class)
+public class MergeLogsTest {
+  @Rule
+  public LocatorServerStartupRule lsRule = new LocatorServerStartupRule();
+  private Locator locator;
+
+  private static final String MESSAGE_1 = "MergeLogsMessage1";
+  private static final String MESSAGE_2 = "MergeLogsMessage2";
+  private static final String MESSAGE_3 = "MergeLogsMessage3";
+  private static final String MESSAGE_4 = "MergeLogsMessage4";
+  private static final String MESSAGE_5 = "MergeLogsMessage5";
+  private static final String MESSAGE_6 = "MergeLogsMessage6";
+
+  @Before
+  public void setup() throws Exception {
+    Properties properties = new Properties();
+    properties.setProperty(DistributionConfig.LOG_LEVEL_NAME, "info");
+    locator = lsRule.startLocatorVM(0, properties);
+
+    properties
+        .setProperty(DistributionConfig.LOCATORS_NAME, "localhost[" + locator.getPort() + "]");
+
+    Server server = lsRule.startServerVM(1, properties);
+    Server server2 = lsRule.startServerVM(2, properties);
+
+    locator.invoke(() -> LogService.getLogger().info(MESSAGE_1));
+    server.invoke(() -> LogService.getLogger().info(MESSAGE_2));
+    server2.invoke(() -> LogService.getLogger().info(MESSAGE_3));
+
+    locator.invoke(() -> LogService.getLogger().info(MESSAGE_4));
+    server.invoke(() -> LogService.getLogger().info(MESSAGE_5));
+    server2.invoke(() -> LogService.getLogger().info(MESSAGE_6));
+  }
+
+  @Test
+  public void testExportInProcess() throws Exception {
+    assertThat(MergeLogs.findLogFilesToMerge(lsRule.getTempFolder().getRoot())).hasSize(3);
+
+    File result = MergeLogs.mergeLogFile(lsRule.getTempFolder().getRoot().getCanonicalPath());
+    assertOnLogContents(result);
+  }
+    @Test
+  public void testExportInNewProcess() throws Throwable {
+    assertThat(MergeLogs.findLogFilesToMerge(lsRule.getTempFolder().getRoot())).hasSize(3);
+
+    MergeLogs.mergeLogsInNewProcess(lsRule.getTempFolder().getRoot().toPath());
+      File result = Arrays.stream(lsRule.getTempFolder().getRoot().listFiles()).filter((File f) -> f.getName().startsWith("merge"))
+            .findFirst().orElseThrow(() -> {throw new AssertionError("No merged log file found");});
+    assertOnLogContents(result);
+
+  }
+
+  private void assertOnLogContents (File mergedLogFile) throws IOException {
+    String
+        mergedLines =
+        FileUtils.readLines(mergedLogFile, Charset.defaultCharset()).stream().collect(joining("\n"));
+
+    assertThat(mergedLines).contains(MESSAGE_1);
+    assertThat(mergedLines).contains(MESSAGE_2);
+    assertThat(mergedLines).contains(MESSAGE_3);
+    assertThat(mergedLines).contains(MESSAGE_4);
+    assertThat(mergedLines).contains(MESSAGE_5);
+    assertThat(mergedLines).contains(MESSAGE_6);
+
+    //Make sure that our merged log file contains the proper ordering
+    assertThat(mergedLines.indexOf(MESSAGE_1)).isLessThan(mergedLines.indexOf(MESSAGE_2));
+    assertThat(mergedLines.indexOf(MESSAGE_2)).isLessThan(mergedLines.indexOf(MESSAGE_3));
+    assertThat(mergedLines.indexOf(MESSAGE_3)).isLessThan(mergedLines.indexOf(MESSAGE_4));
+    assertThat(mergedLines.indexOf(MESSAGE_4)).isLessThan(mergedLines.indexOf(MESSAGE_5));
+    assertThat(mergedLines.indexOf(MESSAGE_5)).isLessThan(mergedLines.indexOf(MESSAGE_6));
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/test/java/org/apache/geode/management/internal/configuration/ZipUtilsJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/configuration/ZipUtilsJUnitTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/configuration/ZipUtilsJUnitTest.java
index 1791574..6f13f62 100755
--- a/geode-core/src/test/java/org/apache/geode/management/internal/configuration/ZipUtilsJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/configuration/ZipUtilsJUnitTest.java
@@ -17,6 +17,7 @@ package org.apache.geode.management.internal.configuration;
 import static org.junit.Assert.*;
 
 import java.io.File;
+import java.io.IOException;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.FilenameUtils;
@@ -107,4 +108,19 @@ public class ZipUtilsJUnitTest {
     assertTrue(clusterText.equals(FileUtils.readFileToString(destinationClusterTextFile)));
     assertTrue(groupText.equals(FileUtils.readFileToString(destinationGroupTextFile)));
   }
+
+  @Test
+  public void zipUtilsCanCreateParentDirsIfNecessary() throws IOException {
+    File newFolder = new File(zipFolder, "newFolder");
+    assertFalse(newFolder.exists());
+
+    File zipFile = new File(newFolder, "target.zip");
+    assertFalse(zipFile.exists());
+    assertFalse(zipFile.isFile());
+
+    ZipUtils.zipDirectory(sourceFolder.getCanonicalPath(), zipFile.getCanonicalPath());
+    assertTrue(newFolder.exists());
+    assertTrue(zipFile.exists());
+    assertTrue(zipFile.isFile());
+  }
 }

http://git-wip-us.apache.org/repos/asf/geode/blob/eef1601d/geode-core/src/test/java/org/apache/geode/test/dunit/rules/GfshShellConnectionRule.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/test/dunit/rules/GfshShellConnectionRule.java b/geode-core/src/test/java/org/apache/geode/test/dunit/rules/GfshShellConnectionRule.java
index 4729be3..93572fe 100644
--- a/geode-core/src/test/java/org/apache/geode/test/dunit/rules/GfshShellConnectionRule.java
+++ b/geode-core/src/test/java/org/apache/geode/test/dunit/rules/GfshShellConnectionRule.java
@@ -164,6 +164,10 @@ public class GfshShellConnectionRule extends DescribedExternalResource {
 
   public CommandResult executeAndVerifyCommand(String command) throws Exception {
     CommandResult result = executeCommand(command);
+
+    if (result.getStatus() != Result.Status.OK) {
+      System.out.println("broken");
+    }
     assertThat(result.getStatus()).describedAs(result.getContent().toString())
         .isEqualTo(Result.Status.OK);
     return result;

