Mailing-List: contact commits-help@geode.apache.org; run by ezmlm
Reply-To: dev@geode.apache.org
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
From: jinmeiliao@apache.org
To: commits@geode.apache.org
Date: Tue, 29 Aug 2017 16:30:33 -0000
Message-Id: <4f746730b4a74b97b71a045d66e11573@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: [16/25] geode git commit: GEODE-3436: Restore refactoring of Refactoring MiscellaneousCommands
archived-at: Tue, 29 Aug 2017 16:30:23 -0000

GEODE-3436: Restore refactoring of Refactoring MiscellaneousCommands

* See initial commit GEODE-3265 (63169699e933f6e0fdd90b95ed039e4e3c92c32c)


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/611095f0
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/611095f0
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/611095f0

Branch: refs/heads/develop
Commit: 611095f0a196ee58a51da6a75367791eff18ca19
Parents: 39fff45
Author: YehEmily
Authored: Mon Aug 7 15:37:23 2017 -0700
Committer: Jinmei Liao
Committed: Tue Aug 29 09:27:26 2017 -0700

----------------------------------------------------------------------
 .../cli/commands/ChangeLogLevelCommand.java     |  163 ++
 .../cli/commands/ExportStackTraceCommand.java   |  182 ++
 .../internal/cli/commands/GCCommand.java        |  131 ++
 .../cli/commands/MiscellaneousCommands.java     | 1926 ------------------
 .../internal/cli/commands/NetstatCommand.java   |  212 ++
 .../cli/commands/ShowDeadlockCommand.java       |   92 +
 .../internal/cli/commands/ShowLogCommand.java   |  102 +
 .../cli/commands/ShowMetricsCommand.java        | 1085 ++++++++++
 .../internal/cli/commands/ShutdownCommand.java  |  209 ++
 .../MiscellaneousCommandsController.java        |    9 +-
 .../cli/commands/LogLevelInterceptorTest.java   |   19 +-
 .../MiscellaneousCommandsDUnitTest.java         |  136 +-
 .../security/GfshCommandsSecurityTest.java      |    4 +-
 .../internal/security/TestCommand.java          |    9 +-
 14 files changed, 2275 insertions(+), 2004 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/geode/blob/611095f0/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ChangeLogLevelCommand.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ChangeLogLevelCommand.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ChangeLogLevelCommand.java
new file mode 100644
index 0000000..823c113
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ChangeLogLevelCommand.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.cli.commands;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.springframework.shell.core.annotation.CliCommand;
+import org.springframework.shell.core.annotation.CliOption;
+
+import org.apache.geode.LogWriter;
+import org.apache.geode.cache.execute.Execution;
+import org.apache.geode.cache.execute.Function;
+import org.apache.geode.cache.execute.FunctionService;
+import org.apache.geode.distributed.DistributedMember;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.internal.logging.log4j.LogLevel;
+import org.apache.geode.management.cli.CliMetaData;
+import org.apache.geode.management.cli.ConverterHint;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
+import org.apache.geode.management.internal.cli.CliUtil;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.cli.LogWrapper;
+import org.apache.geode.management.internal.cli.functions.ChangeLogLevelFunction;
+import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.result.CompositeResultData;
+import org.apache.geode.management.internal.cli.result.ResultBuilder;
+import org.apache.geode.management.internal.cli.result.TabularResultData;
+import org.apache.geode.management.internal.security.ResourceOperation;
+import org.apache.geode.security.ResourcePermission;
+
+public class ChangeLogLevelCommand implements GfshCommand {
+  @CliCommand(value = CliStrings.CHANGE_LOGLEVEL, help = CliStrings.CHANGE_LOGLEVEL__HELP)
+  @CliMetaData(relatedTopic = {CliStrings.TOPIC_LOGS},
+      interceptor = "org.apache.geode.management.internal.cli.commands.ChangeLogLevelCommand$ChangeLogLevelCommandInterceptor")
+  @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
+      operation = ResourcePermission.Operation.WRITE)
+  public Result changeLogLevel(
+      @CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
+          help = CliStrings.CHANGE_LOGLEVEL__MEMBER__HELP) String[] memberIds,
+      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS}, unspecifiedDefaultValue = "",
+          help = CliStrings.CHANGE_LOGLEVEL__GROUPS__HELP) String[] grps,
+      @CliOption(key = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL,
+          optionContext = ConverterHint.LOG_LEVEL, mandatory = true, unspecifiedDefaultValue = "",
+          help = CliStrings.CHANGE_LOGLEVEL__LOGLEVEL__HELP) String logLevel) {
+    try {
+      if ((memberIds == null || memberIds.length == 0) && (grps == null || grps.length == 0)) {
+        return ResultBuilder
+            .createUserErrorResult(CliStrings.CHANGE_LOGLEVEL__MSG__SPECIFY_GRP_OR_MEMBER);
+      }
+
+      InternalCache cache = GemFireCacheImpl.getInstance();
+      LogWriter logger = cache.getLogger();
+
+      Set<DistributedMember> dsMembers = new HashSet<>();
+      Set<DistributedMember> ds = CliUtil.getAllMembers(cache);
+
+      if (grps != null && grps.length > 0) {
+        for (String grp : grps) {
+          dsMembers.addAll(cache.getDistributedSystem().getGroupMembers(grp));
+        }
+      }
+
+      if (memberIds != null && memberIds.length > 0) {
+        for (String member : memberIds) {
+          for (DistributedMember mem : ds) {
+            if (mem.getName() != null
+                && (mem.getName().equals(member) || mem.getId().equals(member))) {
+              dsMembers.add(mem);
+              break;
+            }
+          }
+        }
+      }
+
+      if (dsMembers.size() == 0) {
+        return ResultBuilder.createGemFireErrorResult(CliStrings.CHANGE_LOGLEVEL__MSG_NO_MEMBERS);
+      }
+
+      Function logFunction = new ChangeLogLevelFunction();
+      FunctionService.registerFunction(logFunction);
+      Object[] functionArgs = new Object[1];
+      functionArgs[0] = logLevel;
+
+      CompositeResultData compositeResultData = ResultBuilder.createCompositeResultData();
+      CompositeResultData.SectionResultData section = compositeResultData.addSection("section");
+      TabularResultData resultTable = section.addTable("ChangeLogLevel");
+      resultTable = resultTable.setHeader("Summary");
+
+      Execution execution = FunctionService.onMembers(dsMembers).setArguments(functionArgs);
+      if (execution == null) {
+        return ResultBuilder.createUserErrorResult(CliStrings.CHANGE_LOGLEVEL__MSG__CANNOT_EXECUTE);
+      }
+      List resultList = (List) execution.execute(logFunction).getResult();
+
+      for (Object object : resultList) {
+        try {
+          if (object instanceof Throwable) {
+            logger.warning(
+                "Exception in ChangeLogLevelFunction " + ((Throwable) object).getMessage(),
+                ((Throwable) object));
+            continue;
+          }
+
+          if (object != null) {
+            Map<String, String> resultMap = (Map<String, String>) object;
+            Map.Entry<String, String> entry = resultMap.entrySet().iterator().next();
+
+            if (entry.getValue().contains("ChangeLogLevelFunction exception")) {
+              resultTable.accumulate(CliStrings.CHANGE_LOGLEVEL__COLUMN_MEMBER, entry.getKey());
+              resultTable.accumulate(CliStrings.CHANGE_LOGLEVEL__COLUMN_STATUS, "false");
+            } else {
+              resultTable.accumulate(CliStrings.CHANGE_LOGLEVEL__COLUMN_MEMBER, entry.getKey());
+              resultTable.accumulate(CliStrings.CHANGE_LOGLEVEL__COLUMN_STATUS, "true");
+            }
+
+          }
+        } catch (Exception ex) {
+          LogWrapper.getInstance().warning("change log level command exception " + ex);
+        }
+      }
+
+      Result result = ResultBuilder.buildResult(compositeResultData);
+      logger.info("change log-level command result=" + result);
+      return result;
+    } catch (Exception ex) {
+      GemFireCacheImpl.getInstance().getLogger().error("GFSH Changeloglevel exception: " + ex);
+      return ResultBuilder.createUserErrorResult(ex.getMessage());
+    }
+  }
+
+  public static class ChangeLogLevelCommandInterceptor extends AbstractCliAroundInterceptor {
+    @Override
+    public Result preExecution(GfshParseResult parseResult) {
+      Map<String, String> arguments = parseResult.getParamValueStrings();
+      // validate log level
+      String logLevel = arguments.get("loglevel");
+      if (StringUtils.isBlank(logLevel) || LogLevel.getLevel(logLevel) == null) {
+        return ResultBuilder.createUserErrorResult("Invalid log level: " + logLevel);
+      }
+
+      return ResultBuilder.createInfoResult("");
+    }
+  }
+}
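As a rough usage sketch (not part of this commit), the new ChangeLogLevelCommand is normally dispatched by gfsh through its @CliCommand annotation; a direct call mirroring the method signature above might look like the following. The member name and level are placeholders, and the call assumes it runs in a JVM that already hosts a Geode cache, since the command looks the cache up via GemFireCacheImpl.getInstance().

import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.commands.ChangeLogLevelCommand;

public class ChangeLogLevelSketch {
  public static void main(String[] args) {
    // Hypothetical direct invocation; gfsh normally supplies these option values.
    ChangeLogLevelCommand command = new ChangeLogLevelCommand();
    Result result = command.changeLogLevel(
        new String[] {"server1"}, // memberIds (placeholder member name)
        null,                     // grps (no group filter)
        "fine");                  // logLevel
    System.out.println(result.getStatus());
  }
}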
http://git-wip-us.apache.org/repos/asf/geode/blob/611095f0/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportStackTraceCommand.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportStackTraceCommand.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportStackTraceCommand.java
new file mode 100644
index 0000000..a5749ca
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportStackTraceCommand.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.cli.commands;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.zip.GZIPInputStream;
+
+import org.springframework.shell.core.annotation.CliCommand;
+import org.springframework.shell.core.annotation.CliOption;
+
+import org.apache.geode.cache.execute.ResultCollector;
+import org.apache.geode.distributed.DistributedMember;
+import org.apache.geode.distributed.internal.InternalDistributedSystem;
+import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.management.cli.CliMetaData;
+import org.apache.geode.management.cli.ConverterHint;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
+import org.apache.geode.management.internal.cli.CliUtil;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.cli.domain.StackTracesPerMember;
+import org.apache.geode.management.internal.cli.functions.GetStackTracesFunction;
+import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.result.InfoResultData;
+import org.apache.geode.management.internal.cli.result.ResultBuilder;
+import org.apache.geode.management.internal.security.ResourceOperation;
+import org.apache.geode.security.ResourcePermission;
+
+public class ExportStackTraceCommand implements GfshCommand {
+  private final GetStackTracesFunction getStackTracesFunction = new GetStackTracesFunction();
+
+  /**
+   * Current implementation supports writing it to a file and returning the location of the file
+   */
+  @CliCommand(value = CliStrings.EXPORT_STACKTRACE, help = CliStrings.EXPORT_STACKTRACE__HELP)
+  @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_DEBUG_UTIL})
+  @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
+      operation = ResourcePermission.Operation.READ)
+  public Result exportStackTrace(@CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
+      optionContext = ConverterHint.ALL_MEMBER_IDNAME,
+      help = CliStrings.EXPORT_STACKTRACE__HELP) String[] memberNameOrId,
+
+      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
+          optionContext = ConverterHint.ALL_MEMBER_IDNAME, help = CliStrings.GROUP) String[] group,
+
+      @CliOption(key = CliStrings.EXPORT_STACKTRACE__FILE,
+          help = CliStrings.EXPORT_STACKTRACE__FILE__HELP) String fileName,
+
+      @CliOption(key = CliStrings.EXPORT_STACKTRACE__FAIL__IF__FILE__PRESENT,
+          unspecifiedDefaultValue = "false",
+          help = CliStrings.EXPORT_STACKTRACE__FAIL__IF__FILE__PRESENT__HELP) boolean failIfFilePresent) {
+
+    Result result;
+    StringBuilder filePrefix = new StringBuilder("stacktrace");
+
+    if (fileName == null) {
+      fileName = filePrefix.append("_").append(System.currentTimeMillis()).toString();
+    }
+    final File outFile = new File(fileName);
+    try {
+      if (outFile.exists() && failIfFilePresent) {
+        return ResultBuilder.createShellClientErrorResult(CliStrings.format(
+            CliStrings.EXPORT_STACKTRACE__ERROR__FILE__PRESENT, outFile.getCanonicalPath()));
+      }
+
+
+      InternalCache cache = getCache();
+      InternalDistributedSystem ads = cache.getInternalDistributedSystem();
+
+      InfoResultData resultData = ResultBuilder.createInfoResultData();
+
+      Map<String, byte[]> dumps = new HashMap<>();
+      Set<DistributedMember> targetMembers = CliUtil.findMembers(group, memberNameOrId);
+      if (targetMembers.isEmpty()) {
+        return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
+      }
+
+      ResultCollector rc =
+          CliUtil.executeFunction(getStackTracesFunction, null, targetMembers);
+      ArrayList resultList = (ArrayList) rc.getResult();
+
+      for (Object resultObj : resultList) {
+        if (resultObj instanceof StackTracesPerMember) {
+          StackTracesPerMember stackTracePerMember = (StackTracesPerMember) resultObj;
+          dumps.put(stackTracePerMember.getMemberNameOrId(), stackTracePerMember.getStackTraces());
+        }
+      }
+
+      String filePath = writeStacksToFile(dumps, fileName);
+      resultData.addLine(CliStrings.format(CliStrings.EXPORT_STACKTRACE__SUCCESS, filePath));
+      resultData.addLine(CliStrings.EXPORT_STACKTRACE__HOST + ads.getDistributedMember().getHost());
+
+      result = ResultBuilder.buildResult(resultData);
+    } catch (IOException ex) {
+      result = ResultBuilder
          .createGemFireErrorResult(CliStrings.EXPORT_STACKTRACE__ERROR + ex.getMessage());
+    }
+    return result;
+  }
+
+  // TODO PSR: ExportStackTrace Interceptor appeared to exist, but was not hooked to command and has
+  // a clearly incorrect javadoc.
+  // TODO PSR: It appears it was introduced in 2016-11-26: 903135115a0466d86fa663e965ace3ff47eba6b4,
+  // but never correctly linked to the command.
+  public static class ExportStackTraceInterceptor extends AbstractCliAroundInterceptor {
+    @Override
+    public Result preExecution(GfshParseResult parseResult) {
+
+      Map<String, String> paramValueMap = parseResult.getParamValueStrings();
+      String fileName = paramValueMap.get(CliStrings.EXPORT_STACKTRACE__FILE);
+
+      Response response = readYesNo(
+          CliStrings.format(CliStrings.EXPORT_STACKTRACE_WARN_USER, fileName), Response.YES);
+      if (response == Response.NO) {
+        return ResultBuilder
            .createShellClientAbortOperationResult(CliStrings.EXPORT_STACKTRACE_MSG_ABORTING);
+      } else {
+        // we don't want to show any info result
+        return ResultBuilder.createInfoResult("");
+      }
+    }
+  }
+
+  /***
+   * Writes the stack traces member-wise to a text file
+   *
+   * @param dumps - Map containing key : member , value : zipped stack traces
+   * @param fileName - Name of the file to which the stack-traces are written
+   * @return Canonical path of the file which contains the stack-traces
+   * @throws IOException
+   */
+  private String writeStacksToFile(Map<String, byte[]> dumps, String fileName) throws IOException {
+    String filePath;
+    PrintWriter ps;
+    File outputFile;
+
+    outputFile = new File(fileName);
+    try (OutputStream os = new FileOutputStream(outputFile)) {
+      ps = new PrintWriter(os);
+
+      for (Map.Entry<String, byte[]> entry : dumps.entrySet()) {
+        ps.append("*** Stack-trace for member ").append(entry.getKey()).append(" ***");
+        ps.flush();
+        GZIPInputStream zipIn = new GZIPInputStream(new ByteArrayInputStream(entry.getValue()));
+        BufferedInputStream bin = new BufferedInputStream(zipIn);
+        byte[] buffer = new byte[10000];
+        int count;
+        while ((count = bin.read(buffer)) != -1) {
+          os.write(buffer, 0, count);
+        }
+        ps.append('\n');
+      }
+      ps.flush();
+      filePath = outputFile.getCanonicalPath();
+    }
+
+    return filePath;
+  }
+}
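As a rough usage sketch (not part of this commit), ExportStackTraceCommand above could be driven directly as below. The member and file names are placeholders, the call assumes a JVM with a live cache (it uses getCache() internally), and note that the ExportStackTraceInterceptor defined above is not wired to the command, per the TODO comments in the diff.

import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.commands.ExportStackTraceCommand;

public class ExportStackTraceSketch {
  public static void main(String[] args) {
    // Hypothetical direct invocation mirroring exportStackTrace(...) above.
    ExportStackTraceCommand command = new ExportStackTraceCommand();
    Result result = command.exportStackTrace(
        new String[] {"server1"}, // memberNameOrId (placeholder)
        null,                     // group (no group filter)
        "stacktraces.txt",        // fileName; null defaults to stacktrace_<timestamp>
        true);                    // failIfFilePresent
    System.out.println(result.getStatus());
  }
}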
http://git-wip-us.apache.org/repos/asf/geode/blob/611095f0/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/GCCommand.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/GCCommand.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/GCCommand.java
new file mode 100644
index 0000000..4884d6d
--- /dev/null
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/GCCommand.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.cli.commands;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.springframework.shell.core.annotation.CliCommand;
+import org.springframework.shell.core.annotation.CliOption;
+
+import org.apache.geode.cache.execute.Function;
+import org.apache.geode.distributed.DistributedMember;
+import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.management.cli.CliMetaData;
+import org.apache.geode.management.cli.ConverterHint;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.CliUtil;
+import org.apache.geode.management.internal.cli.LogWrapper;
+import org.apache.geode.management.internal.cli.functions.GarbageCollectionFunction;
+import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.result.CompositeResultData;
+import org.apache.geode.management.internal.cli.result.ResultBuilder;
+import org.apache.geode.management.internal.cli.result.TabularResultData;
+import org.apache.geode.management.internal.security.ResourceOperation;
+import org.apache.geode.security.ResourcePermission;
+
+public class GCCommand implements GfshCommand {
+  @CliCommand(value = CliStrings.GC, help = CliStrings.GC__HELP)
+  @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_DEBUG_UTIL})
+  @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
+      operation = ResourcePermission.Operation.MANAGE)
+  public Result gc(
+      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
+          help = CliStrings.GC__GROUP__HELP) String[] groups,
+      @CliOption(key = CliStrings.MEMBER, optionContext = ConverterHint.ALL_MEMBER_IDNAME,
+          help = CliStrings.GC__MEMBER__HELP) String memberId) {
+    InternalCache cache = getCache();
+    Result result;
+    CompositeResultData gcResultTable = ResultBuilder.createCompositeResultData();
+    TabularResultData resultTable = gcResultTable.addSection().addTable("Table1");
+    String headerText = "GC Summary";
+    resultTable.setHeader(headerText);
+    Set<DistributedMember> dsMembers = new HashSet<>();
+    if (memberId != null && memberId.length() > 0) {
+      DistributedMember member = CliUtil.getDistributedMemberByNameOrId(memberId);
+      if (member == null) {
+        return ResultBuilder
            .createGemFireErrorResult(memberId + CliStrings.GC__MSG__MEMBER_NOT_FOUND);
+      }
+      dsMembers.add(member);
+      result = executeAndBuildResult(resultTable, dsMembers);
+    } else if (groups != null && groups.length > 0) {
+      for (String group : groups) {
+        dsMembers.addAll(cache.getDistributedSystem().getGroupMembers(group));
+      }
+      result = executeAndBuildResult(resultTable, dsMembers);
+
+    } else {
+      // gc on entire cluster
+      // exclude locators
+      dsMembers = CliUtil.getAllNormalMembers(cache);
+      result = executeAndBuildResult(resultTable, dsMembers);
+
+    }
+    return result;
+  }
+
+  private Result executeAndBuildResult(TabularResultData resultTable,
+      Set<DistributedMember> dsMembers) {
+    try {
+      List resultList;
+      Function garbageCollectionFunction = new GarbageCollectionFunction();
+      resultList =
          (List) CliUtil.executeFunction(garbageCollectionFunction, null, dsMembers).getResult();
+
+      for (Object object : resultList) {
+        if (object instanceof Exception) {
+          LogWrapper.getInstance().fine("Exception in GC " + ((Throwable) object).getMessage(),
+              ((Throwable) object));
+          continue;
+        } else if (object instanceof Throwable) {
+          LogWrapper.getInstance().fine("Exception in GC " + ((Throwable) object).getMessage(),
+              ((Throwable) object));
+          continue;
+        }
+
+        if (object != null) {
+          if (object instanceof String) {
+            // unexpected exception string - cache may be closed or something
+            return ResultBuilder.createUserErrorResult((String) object);
+          } else {
+            Map<String, String> resultMap = (Map<String, String>) object;
+            toTabularResultData(resultTable, resultMap.get("MemberId"),
+                resultMap.get("HeapSizeBeforeGC"), resultMap.get("HeapSizeAfterGC"),
+                resultMap.get("TimeSpentInGC"));
+          }
+        } else {
+          LogWrapper.getInstance().fine("ResultMap was null ");
+        }
+      }
+    } catch (Exception e) {
+      String stack = CliUtil.stackTraceAsString(e);
+      LogWrapper.getInstance().info("GC exception is " + stack);
+      return ResultBuilder.createGemFireErrorResult(e.getMessage() + ": " + stack);
+    }
+    return ResultBuilder.buildResult(resultTable);
+  }
+
+  private void toTabularResultData(TabularResultData table, String memberId, String heapSizeBefore,
+      String heapSizeAfter, String timeTaken) {
+    table.accumulate(CliStrings.GC__MSG__MEMBER_NAME, memberId);
+    table.accumulate(CliStrings.GC__MSG__HEAP_SIZE_BEFORE_GC, heapSizeBefore);
+    table.accumulate(CliStrings.GC__MSG__HEAP_SIZE_AFTER_GC, heapSizeAfter);
+    table.accumulate(CliStrings.GC__MSG__TOTAL_TIME_IN_GC, timeTaken);
+  }
+}
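As a rough usage sketch (not part of this commit), GCCommand above could be exercised directly as below. With both arguments left null it follows the "gc on entire cluster" branch and excludes locators via CliUtil.getAllNormalMembers(cache); the call assumes it runs in a JVM with a live cache, since the command obtains it through getCache().

import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.commands.GCCommand;

public class GCCommandSketch {
  public static void main(String[] args) {
    // Hypothetical direct invocation; null groups and null memberId mean
    // "collect garbage on all normal (non-locator) members".
    GCCommand command = new GCCommand();
    Result result = command.gc(null, null);
    System.out.println(result.getStatus());
  }
}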