geode-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kl...@apache.org
Subject [11/50] [abbrv] incubator-geode git commit: GEODE-563: Moving gfsh tests from closed
Date Fri, 11 Dec 2015 22:05:44 GMT
http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/eddef322/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java
new file mode 100644
index 0000000..22a38d2
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/ListIndexCommandDUnitTest.java
@@ -0,0 +1,672 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.DataPolicy;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.Scope;
+import com.gemstone.gemfire.cache.query.Index;
+import com.gemstone.gemfire.cache.query.IndexStatistics;
+import com.gemstone.gemfire.cache.query.IndexType;
+import com.gemstone.gemfire.cache.query.SelectResults;
+import com.gemstone.gemfire.distributed.internal.DistributionConfig;
+import com.gemstone.gemfire.internal.lang.MutableIdentifiable;
+import com.gemstone.gemfire.internal.lang.ObjectUtils;
+import com.gemstone.gemfire.internal.lang.StringUtils;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.internal.cli.domain.IndexDetails;
+import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
+import dunit.Host;
+import dunit.SerializableRunnable;
+import dunit.VM;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Properties;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * The ListIndexCommandDUnitTest class is distributed test suite of test cases for testing the index-based GemFire shell
+ * (Gfsh) commands. </p>
+ *
+ * @author John Blum
+ * @see com.gemstone.gemfire.management.internal.cli.commands.CliCommandTestBase
+ * @see com.gemstone.gemfire.management.internal.cli.commands.IndexCommands
+ * @since 7.0
+ */
+@SuppressWarnings("unused")
+public class ListIndexCommandDUnitTest extends CliCommandTestBase {
+
+  protected static final int DEFAULT_REGION_INITIAL_CAPACITY = 10000;
+
+  private final AtomicLong idGenerator = new AtomicLong(0l);
+
+  protected static String toString(final Result result) {
+    assert result != null : "The Result object from the command execution cannot be null!";
+
+    final StringBuilder buffer = new StringBuilder(System.getProperty("line.separator"));
+
+    while (result.hasNextLine()) {
+      buffer.append(result.nextLine());
+      buffer.append(System.getProperty("line.separator"));
+    }
+
+    return buffer.toString();
+  }
+
+
+  public ListIndexCommandDUnitTest(final String testName) {
+    super(testName);
+  }
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    createDefaultSetup(null);
+    setupGemFire();
+  }
+
+  @Override
+  public void tearDown2() throws Exception {
+    super.tearDown2();
+  }
+
+  protected Index createIndex(final String name, final String indexedExpression, final String fromClause) {
+    return createIndex(name, IndexType.FUNCTIONAL, indexedExpression, fromClause);
+  }
+
+  protected Index createIndex(final String name, final IndexType type, final String indexedExpression,
+      final String fromClause) {
+    return new IndexAdapter(name, type, indexedExpression, fromClause);
+  }
+
+  protected Peer createPeer(final VM vm, final Properties distributedSystemProperties,
+      final RegionDefinition... regions) {
+    final Peer peer = new Peer(vm, distributedSystemProperties);
+    peer.add(regions);
+    return peer;
+  }
+
+  protected RegionDefinition createRegionDefinition(final String regionName, final Class<?> keyConstraint,
+      final Class<?> valueConstraint, final Index... indexes) {
+    final RegionDefinition regionDefinition = new RegionDefinition(regionName, keyConstraint, valueConstraint);
+    regionDefinition.add(indexes);
+    return regionDefinition;
+  }
+
+  protected void setupGemFire() throws Exception {
+    final Host host = Host.getHost(0);
+
+    final VM vm1 = host.getVM(1);
+    final VM vm2 = host.getVM(2);
+
+    final Peer peer1 = createPeer(vm1, createDistributedSystemProperties("consumerServer"),
+        createRegionDefinition("consumers", Long.class, Consumer.class,
+            createIndex("cidIdx", IndexType.PRIMARY_KEY, "id", "/consumers"),
+            createIndex("cnameIdx", "name", "/consumers")));
+
+    final Peer peer2 = createPeer(vm2, createDistributedSystemProperties("producerServer"),
+        createRegionDefinition("producers", Long.class, Producer.class, createIndex("pidIdx", "id", "/producers")));
+
+    createRegionWithIndexes(peer1);
+    createRegionWithIndexes(peer2);
+
+    loadConsumerData(peer1, 10000);
+    loadProducerData(peer2, 10000);
+  }
+
+  protected Properties createDistributedSystemProperties(final String gemfireName) {
+    final Properties distributedSystemProperties = new Properties();
+
+    distributedSystemProperties.setProperty(DistributionConfig.LOG_LEVEL_NAME, getDUnitLogLevel());
+    distributedSystemProperties.setProperty(DistributionConfig.NAME_NAME, gemfireName);
+
+    return distributedSystemProperties;
+  }
+
+  protected void createRegionWithIndexes(final Peer peer) {
+    peer.run(new SerializableRunnable(
+        String.format("Creating Regions with Indexes on GemFire peer (%1$s).", peer.getName())) {
+      public void run() {
+        // create the GemFire distributed system with custom configuration properties...
+        getSystem(peer.getConfiguration());
+
+        final Cache cache = getCache();
+        final RegionFactory regionFactory = cache.createRegionFactory();
+
+        for (RegionDefinition regionDefinition : peer) {
+          regionFactory.setDataPolicy(DataPolicy.REPLICATE);
+          regionFactory.setIndexMaintenanceSynchronous(true);
+          regionFactory.setInitialCapacity(DEFAULT_REGION_INITIAL_CAPACITY);
+          regionFactory.setKeyConstraint(regionDefinition.getKeyConstraint());
+          regionFactory.setScope(Scope.DISTRIBUTED_NO_ACK);
+          regionFactory.setStatisticsEnabled(true);
+          regionFactory.setValueConstraint(regionDefinition.getValueConstraint());
+
+          final Region region = regionFactory.create(regionDefinition.getRegionName());
+          String indexName = null;
+
+          try {
+            for (Index index : regionDefinition) {
+              indexName = index.getName();
+              if (IndexType.PRIMARY_KEY.equals(index.getType())) {
+                cache.getQueryService().createKeyIndex(indexName, index.getIndexedExpression(), region.getFullPath());
+              } else {
+                cache.getQueryService().createIndex(indexName, index.getIndexedExpression(), region.getFullPath());
+              }
+            }
+          } catch (Exception e) {
+            getLogWriter().error(
+                String.format("Error occurred creating Index (%1$s) on Region (%2$s) - (%3$s)", indexName,
+                    region.getFullPath(), e.getMessage()));
+          }
+        }
+      }
+    });
+  }
+
+  protected void loadConsumerData(final Peer peer, final int operationsTotal) {
+    peer.run(new SerializableRunnable("Load /consumers Region with data") {
+      public void run() {
+        final Cache cache = getCache();
+        final Region<Long, Consumer> consumerRegion = cache.getRegion("/consumers");
+
+        final Random random = new Random(System.currentTimeMillis());
+        int count = 0;
+
+        final List<Proxy> proxies = new ArrayList<Proxy>();
+
+        Consumer consumer;
+        Proxy proxy;
+
+        while (count++ < operationsTotal) {
+          switch (CrudOperation.values()[random.nextInt(CrudOperation.values().length)]) {
+            case RETRIEVE:
+              if (!proxies.isEmpty()) {
+                proxy = proxies.get(random.nextInt(proxies.size()));
+                consumer = query(consumerRegion, "id = " + proxy.getId() + "l"); // works
+                //consumer = query(consumerRegion, "Id = " + proxy.getId()); // works
+                //consumer = query(consumerRegion, "id = " + proxy.getId()); // does not work
+                proxy.setUnitsSnapshot(consumer.getUnits());
+                break;
+              }
+            case UPDATE:
+              if (!proxies.isEmpty()) {
+                proxy = proxies.get(random.nextInt(proxies.size()));
+                consumer = query(consumerRegion, "Name = " + proxy.getName());
+                consumer.consume();
+                break;
+              }
+            case CREATE:
+            default:
+              consumer = new Consumer(idGenerator.incrementAndGet());
+              proxies.add(new Proxy(consumer));
+              consumerRegion.put(consumer.getId(), consumer);
+              assertTrue(consumerRegion.containsKey(consumer.getId()));
+              assertTrue(consumerRegion.containsValueForKey(consumer.getId()));
+              assertSame(consumer, consumerRegion.get(consumer.getId()));
+          }
+        }
+      }
+    });
+  }
+
+  protected void loadProducerData(final Peer peer, final int operationsTotal) {
+    peer.run(new SerializableRunnable("Load /producers Region with data") {
+      public void run() {
+        final Cache cache = getCache();
+        final Region<Long, Producer> producerRegion = cache.getRegion("/producers");
+
+        final Random random = new Random(System.currentTimeMillis());
+        int count = 0;
+
+        final List<Proxy> proxies = new ArrayList<Proxy>();
+
+        Producer producer;
+        Proxy proxy;
+
+        while (count++ < operationsTotal) {
+          switch (CrudOperation.values()[random.nextInt(CrudOperation.values().length)]) {
+            case RETRIEVE:
+              if (!proxies.isEmpty()) {
+                proxy = proxies.get(random.nextInt(proxies.size()));
+                producer = query(producerRegion, "Id = " + proxy.getId());
+                proxy.setUnitsSnapshot(producer.getUnits());
+                break;
+              }
+            case UPDATE:
+              if (!proxies.isEmpty()) {
+                proxy = proxies.get(random.nextInt(proxies.size()));
+                producer = query(producerRegion, "Id = " + proxy.getId());
+                producer.produce();
+                break;
+              }
+            case CREATE:
+            default:
+              producer = new Producer(idGenerator.incrementAndGet());
+              proxies.add(new Proxy(producer));
+              producerRegion.put(producer.getId(), producer);
+              assertTrue(producerRegion.containsKey(producer.getId()));
+              assertTrue(producerRegion.containsValueForKey(producer.getId()));
+              assertSame(producer, producerRegion.get(producer.getId()));
+          }
+        }
+      }
+    });
+  }
+
+  @SuppressWarnings("unchecked")
+  protected <T extends Comparable<T>, B extends AbstractBean<T>> B query(final Cache cache, final String queryString) {
+    try {
+      getLogWriter().info(String.format("Running Query (%1$s) in GemFire...", queryString));
+
+      final SelectResults<B> results = (SelectResults<B>) cache.getQueryService().newQuery(queryString).execute();
+
+      getLogWriter().info(
+          String.format("Running Query (%1$s) in GemFire returned (%2$d) result(s).", queryString, results.size()));
+
+      return (results.iterator().hasNext() ? results.iterator().next() : null);
+    } catch (Exception e) {
+      throw new RuntimeException(String.format("An error occurred running Query (%1$s)!", queryString), e);
+    }
+  }
+
+  protected <T extends Comparable<T>, B extends AbstractBean<T>> B query(final Region<T, B> region,
+      final String queryPredicate) {
+    try {
+      getLogWriter().info(
+          String.format("Running Query (%1$s) on Region (%2$s)...", queryPredicate, region.getFullPath()));
+
+      final SelectResults<B> results = region.query(queryPredicate);
+
+      getLogWriter().info(
+          String.format("Running Query (%1$s) on Region (%2$s) returned (%3$d) result(s).", queryPredicate,
+              region.getFullPath(), results.size()));
+
+      return (results.iterator().hasNext() ? results.iterator().next() : null);
+    } catch (Exception e) {
+      throw new RuntimeException(
+          String.format("An error occurred running Query (%1$s) on Region (%2$s)!", queryPredicate,
+              region.getFullPath()), e);
+    }
+  }
+
+  public void testListIndex() throws Exception {
+    final Result result = executeCommand(CliStrings.LIST_INDEX + " --" + CliStrings.LIST_INDEX__STATS);
+
+    assertNotNull(result);
+    getLogWriter().info(toString(result));
+    assertEquals(Result.Status.OK, result.getStatus());
+  }
+
+  protected static class Peer implements Iterable<RegionDefinition>, Serializable {
+
+    private final Properties distributedSystemProperties;
+
+    private final Set<RegionDefinition> regions = new HashSet<RegionDefinition>();
+
+    private final VM vm;
+
+    public Peer(final VM vm, final Properties distributedSystemProperties) {
+      assert distributedSystemProperties != null : "The GemFire Distributed System configuration properties cannot be null!";
+      this.distributedSystemProperties = distributedSystemProperties;
+      this.vm = vm;
+    }
+
+    public Properties getConfiguration() {
+      return this.distributedSystemProperties;
+    }
+
+    public String getName() {
+      return getConfiguration().getProperty(DistributionConfig.NAME_NAME);
+    }
+
+    public VM getVm() {
+      return vm;
+    }
+
+    public boolean add(final RegionDefinition... regionDefinitions) {
+      return (regionDefinitions != null && regions.addAll(Arrays.asList(regionDefinitions)));
+    }
+
+    public Iterator<RegionDefinition> iterator() {
+      return Collections.unmodifiableSet(regions).iterator();
+    }
+
+    public boolean remove(final RegionDefinition... regionDefinitions) {
+      return (regionDefinitions != null && regions.removeAll(Arrays.asList(regionDefinitions)));
+    }
+
+    public void run(final Runnable runnable) {
+      if (getVm() == null) {
+        runnable.run();
+      } else {
+        getVm().invoke(runnable);
+      }
+    }
+
+    @Override
+    public String toString() {
+      final StringBuilder buffer = new StringBuilder(getClass().getSimpleName());
+      buffer.append(" {configuration = ").append(getConfiguration());
+      buffer.append(", name = ").append(getName());
+      buffer.append(", pid = ").append(getVm().getPid());
+      buffer.append("}");
+      return buffer.toString();
+    }
+  }
+
+  protected static class IndexAdapter implements Index, Serializable {
+
+    private final IndexDetails.IndexType type;
+
+    private final String fromClause;
+    private final String indexedExpression;
+    private final String name;
+
+    protected IndexAdapter(final String name, final String indexedExpression, final String fromClause) {
+      this(name, IndexType.FUNCTIONAL, indexedExpression, fromClause);
+    }
+
+    protected IndexAdapter(final String name, final IndexType type, final String indexedExpression,
+        final String fromClause) {
+      assert name != null : "The name of the Index cannot be null!";
+      assert indexedExpression != null : String.format("The expression to index for Index (%1$s) cannot be null!",
+          name);
+      assert fromClause != null : String.format("The from clause for Index (%1$s) cannot be null!", name);
+
+      this.type = ObjectUtils.defaultIfNull(IndexDetails.IndexType.valueOf(type), IndexDetails.IndexType.FUNCTIONAL);
+      this.name = name;
+      this.indexedExpression = indexedExpression;
+      this.fromClause = fromClause;
+    }
+
+    public String getName() {
+      return this.name;
+    }
+
+    public String getFromClause() {
+      return this.fromClause;
+    }
+
+    public String getCanonicalizedFromClause() {
+      return this.fromClause;
+    }
+
+    public String getIndexedExpression() {
+      return this.indexedExpression;
+    }
+
+    public String getCanonicalizedIndexedExpression() {
+      return this.indexedExpression;
+    }
+
+    public String getProjectionAttributes() {
+      throw new UnsupportedOperationException("Not Implemented!");
+    }
+
+    public String getCanonicalizedProjectionAttributes() {
+      throw new UnsupportedOperationException("Not Implemented!");
+    }
+
+    public Region<?, ?> getRegion() {
+      throw new UnsupportedOperationException("Not Implemented!");
+    }
+
+    public IndexStatistics getStatistics() {
+      throw new UnsupportedOperationException("Not Implemented!");
+    }
+
+    public IndexType getType() {
+      return type.getType();
+    }
+
+    @Override
+    public String toString() {
+      final StringBuilder buffer = new StringBuilder(getClass().getSimpleName());
+      buffer.append(" {indexName = ").append(getName());
+      buffer.append(", indexType = ").append(getType());
+      buffer.append(", indexedExpression = ").append(getIndexedExpression());
+      buffer.append(", fromClause = ").append(getFromClause());
+      buffer.append("}");
+      return buffer.toString();
+    }
+  }
+
+  protected static class RegionDefinition implements Iterable<Index>, Serializable {
+
+    private final Class<?> keyConstraint;
+    private final Class<?> valueConstraint;
+
+    private final Set<Index> indexes = new HashSet<Index>();
+
+    private final String regionName;
+
+    @SuppressWarnings("unchecked")
+    protected RegionDefinition(final String regionName, final Class<?> keyConstraint, final Class<?> valueConstraint) {
+      assert !StringUtils.isBlank(regionName) : "The name of the Region must be specified!";
+      this.regionName = regionName;
+      this.keyConstraint = ObjectUtils.defaultIfNull(keyConstraint, Object.class);
+      this.valueConstraint = ObjectUtils.defaultIfNull(valueConstraint, Object.class);
+    }
+
+    public String getRegionName() {
+      return regionName;
+    }
+
+    public Class<?> getKeyConstraint() {
+      return keyConstraint;
+    }
+
+    public Class<?> getValueConstraint() {
+      return valueConstraint;
+    }
+
+    public boolean add(final Index... indexes) {
+      return (indexes != null && this.indexes.addAll(Arrays.asList(indexes)));
+    }
+
+    public Iterator<Index> iterator() {
+      return Collections.unmodifiableSet(indexes).iterator();
+    }
+
+    public boolean remove(final Index... indexes) {
+      return (indexes != null && this.indexes.removeAll(Arrays.asList(indexes)));
+    }
+
+    @Override
+    public boolean equals(final Object obj) {
+      if (obj == this) {
+        return true;
+      }
+
+      if (!(obj instanceof RegionDefinition)) {
+        return false;
+      }
+
+      final RegionDefinition that = (RegionDefinition) obj;
+
+      return ObjectUtils.equals(getRegionName(), that.getRegionName());
+    }
+
+    @Override
+    public int hashCode() {
+      int hashValue = 17;
+      hashValue = 37 * hashValue + ObjectUtils.hashCode(getRegionName());
+      return hashValue;
+    }
+
+    @Override
+    public String toString() {
+      final StringBuilder buffer = new StringBuilder(getClass().getSimpleName());
+      buffer.append(" {regionName = ").append(getRegionName());
+      buffer.append(", keyConstraint = ").append(getKeyConstraint());
+      buffer.append(", valueConstraint = ").append(getValueConstraint());
+      buffer.append("}");
+      return buffer.toString();
+    }
+  }
+
+  protected static abstract class AbstractBean<T extends Comparable<T>> implements MutableIdentifiable<T>, Serializable {
+
+    private T id;
+    private String name;
+
+    public AbstractBean() {
+    }
+
+    public AbstractBean(final T id) {
+      this.id = id;
+    }
+
+    public T getId() {
+      return id;
+    }
+
+    public void setId(final T id) {
+      this.id = id;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public void setName(final String name) {
+      this.name = name;
+    }
+
+    @Override
+    public boolean equals(final Object obj) {
+      if (obj == this) {
+        return true;
+      }
+
+      if (!(getClass().isInstance(obj))) {
+        return false;
+      }
+
+      final AbstractBean bean = (AbstractBean) obj;
+
+      return ObjectUtils.equals(getId(), bean.getId());
+    }
+
+    @Override
+    public int hashCode() {
+      int hashValue = 17;
+      hashValue = 37 * hashValue + ObjectUtils.hashCode(getId());
+      return hashValue;
+    }
+
+    @Override
+    public String toString() {
+      final StringBuilder buffer = new StringBuilder(getClass().getSimpleName());
+      buffer.append(" {id = ").append(getId());
+      buffer.append(", name = ").append(getName());
+      buffer.append("}");
+      return buffer.toString();
+    }
+  }
+
+  public static class Consumer extends AbstractBean<Long> {
+
+    private volatile int units;
+
+    public Consumer() {
+    }
+
+    public Consumer(final Long id) {
+      super(id);
+    }
+
+    public int getUnits() {
+      return units;
+    }
+
+    public int consume() {
+      return ++units;
+    }
+  }
+
+  public static class Producer extends AbstractBean<Long> {
+
+    private volatile int units;
+
+    public Producer() {
+    }
+
+    public Producer(final Long id) {
+      super(id);
+    }
+
+    public int getUnits() {
+      return units;
+    }
+
+    public int produce() {
+      return ++units;
+    }
+  }
+
+  public static class Proxy extends AbstractBean<Long> {
+
+    private final AbstractBean<Long> bean;
+    private int unitsSnapshot;
+
+    public Proxy(final AbstractBean<Long> bean) {
+      assert bean != null : "The bean to proxy cannot be null!";
+      this.bean = bean;
+    }
+
+    public AbstractBean<Long> getBean() {
+      return bean;
+    }
+
+    @Override
+    public Long getId() {
+      return getBean().getId();
+    }
+
+    @Override
+    public String getName() {
+      return getBean().getName();
+    }
+
+    public int getUnitsSnapshot() {
+      return unitsSnapshot;
+    }
+
+    public void setUnitsSnapshot(final int unitsSnapshot) {
+      this.unitsSnapshot = unitsSnapshot;
+    }
+  }
+
+  protected static enum CrudOperation {
+    CREATE,
+    RETRIEVE,
+    UPDATE,
+    DELETE
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/eddef322/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java
new file mode 100644
index 0000000..6623403
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MemberCommandsDUnitTest.java
@@ -0,0 +1,286 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.EvictionAction;
+import com.gemstone.gemfire.cache.EvictionAttributes;
+import com.gemstone.gemfire.cache.FixedPartitionAttributes;
+import com.gemstone.gemfire.cache.PartitionAttributes;
+import com.gemstone.gemfire.cache.PartitionAttributesFactory;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.cache30.CacheTestCase;
+import com.gemstone.gemfire.distributed.DistributedMember;
+import com.gemstone.gemfire.distributed.Locator;
+import com.gemstone.gemfire.distributed.internal.DistributionConfig;
+import com.gemstone.gemfire.internal.AvailablePortHelper;
+import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.cli.Result.Status;
+import com.gemstone.gemfire.management.internal.cli.CliUtil;
+import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
+import com.gemstone.gemfire.management.internal.cli.remote.CommandProcessor;
+import com.gemstone.gemfire.management.internal.cli.util.CommandStringBuilder;
+import dunit.Host;
+import dunit.SerializableRunnable;
+import dunit.VM;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+/****
+ * @author bansods since 7.0
+ */
+
+public class MemberCommandsDUnitTest extends CacheTestCase {
+  private static final long serialVersionUID = 1L;
+  private static final Map<String, String> EMPTY_ENV = Collections.emptyMap();
+  private static final String REGION1 = "region1";
+  private static final String REGION2 = "region2";
+  private static final String REGION3 = "region3";
+  private static final String SUBREGION1A = "subregion1A";
+  private static final String SUBREGION1B = "subregion1B";
+  private static final String SUBREGION1C = "subregion1C";
+  private static final String PR1 = "PartitionedRegion1";
+  private static final String PR2 = "ParitionedRegion2";
+
+  public MemberCommandsDUnitTest(String name) {
+    super(name);
+  }
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    // This test does not require an actual Gfsh connection to work, however when run as part of a suite, prior tests
+    // may mess up the environment causing this test to fail. Setting this prevents false failures.
+    CliUtil.isGfshVM = false;
+  }
+
+  @Override
+  public void tearDown2() throws Exception {
+    super.tearDown2();
+    CliUtil.isGfshVM = true;
+  }
+
+  private Properties createProperties(String name, String groups) {
+    Properties props = new Properties();
+    props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
+    props.setProperty(DistributionConfig.LOG_LEVEL_NAME, "info");
+    props.setProperty(DistributionConfig.STATISTIC_SAMPLING_ENABLED_NAME, "true");
+    props.setProperty(DistributionConfig.ENABLE_TIME_STATISTICS_NAME, "true");
+    props.setProperty(DistributionConfig.NAME_NAME, name);
+    props.setProperty(DistributionConfig.GROUPS_NAME, groups);
+    return props;
+  }
+
+  private void createRegionsWithSubRegions() {
+    final Cache cache = getCache();
+
+    RegionFactory<String, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.REPLICATE);
+    dataRegionFactory.setConcurrencyLevel(3);
+    Region<String, Integer> region1 = dataRegionFactory.create(REGION1);
+    region1.createSubregion(SUBREGION1C, region1.getAttributes());
+    Region<String, Integer> subregion2 = region1.createSubregion(SUBREGION1A, region1.getAttributes());
+
+    subregion2.createSubregion(SUBREGION1B, subregion2.getAttributes());
+    dataRegionFactory.create(REGION2);
+    dataRegionFactory.create(REGION3);
+  }
+
+  private void createPartitionedRegion1() {
+    final Cache cache = getCache();
+    // Create the data region
+    RegionFactory<String, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+    dataRegionFactory.create(PR1);
+  }
+
+  private void createPartitionedRegion(String regionName) {
+    final Cache cache = getCache();
+    // Create the data region
+    RegionFactory<String, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+    dataRegionFactory.setConcurrencyLevel(4);
+    EvictionAttributes ea = EvictionAttributes.createLIFOEntryAttributes(100, EvictionAction.LOCAL_DESTROY);
+    dataRegionFactory.setEvictionAttributes(ea);
+    dataRegionFactory.setEnableAsyncConflation(true);
+
+    FixedPartitionAttributes fpa = FixedPartitionAttributes.createFixedPartition("Par1", true);
+    PartitionAttributes pa = new PartitionAttributesFactory().setLocalMaxMemory(100).setRecoveryDelay(
+        2).setTotalMaxMemory(200).setRedundantCopies(1).addFixedPartitionAttributes(fpa).create();
+    dataRegionFactory.setPartitionAttributes(pa);
+
+    dataRegionFactory.create(regionName);
+  }
+
+
+  private void createLocalRegion() {
+    final Cache cache = getCache();
+    // Create the data region
+    RegionFactory<String, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.LOCAL);
+    dataRegionFactory.create("LocalRegion");
+  }
+
+  private void setupSystem() throws IOException {
+    disconnectAllFromDS();
+    final Host host = Host.getHost(0);
+    final VM[] servers = {host.getVM(0), host.getVM(1)};
+
+    final Properties propsMe = createProperties("me", "G1");
+    final Properties propsServer1 = createProperties("Server1", "G1");
+    final Properties propsServer2 = createProperties("Server2", "G2");
+
+
+    getSystem(propsMe);
+    final Cache cache = getCache();
+    RegionFactory<String, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.REPLICATE_PROXY);
+    dataRegionFactory.setConcurrencyLevel(5);
+    Region<String, Integer> region1 = dataRegionFactory.create(REGION1);
+
+
+    servers[1].invoke(new SerializableRunnable("Create cache for server1") {
+      public void run() {
+        getSystem(propsServer2);
+        createRegionsWithSubRegions();
+        createLocalRegion();
+        createPartitionedRegion("ParReg1");
+      }
+    });
+    servers[0].invoke(new SerializableRunnable("Create cache for server0") {
+      public void run() {
+        getSystem(propsServer1);
+        createRegionsWithSubRegions();
+        createLocalRegion();
+      }
+    });
+  }
+
+  private Properties createProperties(Host host, int locatorPort) {
+    Properties props = new Properties();
+
+    props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
+    props.setProperty(DistributionConfig.LOCATORS_NAME, getServerHostName(host) + "[" + locatorPort + "]");
+    props.setProperty(DistributionConfig.LOG_LEVEL_NAME, "info");
+    props.setProperty(DistributionConfig.STATISTIC_SAMPLING_ENABLED_NAME, "true");
+    props.setProperty(DistributionConfig.ENABLE_TIME_STATISTICS_NAME, "true");
+    props.put(DistributionConfig.ENABLE_NETWORK_PARTITION_DETECTION_NAME, "true");
+
+    return props;
+  }
+
+  /**
+   * Creates the cache.
+   */
+  private void createCache(Properties props) {
+    getSystem(props);
+    final Cache cache = getCache();
+  }
+
+  /***
+   * Tests the execution of "list member" command which should list out all the members in the DS
+   *
+   * @throws IOException
+   * @throws ClassNotFoundException
+   */
+  public void testListMemberAll() throws IOException, ClassNotFoundException {
+    setupSystem();
+    CommandProcessor commandProcessor = new CommandProcessor();
+    Result result = commandProcessor.createCommandStatement(CliStrings.LIST_MEMBER, EMPTY_ENV).process();
+    getLogWriter().info("#SB" + getResultAsString(result));
+    assertEquals(true, result.getStatus().equals(Status.OK));
+  }
+
+  /****
+   * Tests the execution of "list member" command, when no cache is created
+   *
+   * @throws IOException
+   * @throws ClassNotFoundException
+   */
+  public void testListMemberWithNoCache() throws IOException, ClassNotFoundException {
+    final Host host = Host.getHost(0);
+    final VM[] servers = {host.getVM(0), host.getVM(1)};
+    final int openPorts[] = AvailablePortHelper.getRandomAvailableTCPPorts(1);
+    final File logFile = new File(getUniqueName() + "-locator" + openPorts[0] + ".log");
+
+    Locator locator = Locator.startLocator(openPorts[0], logFile);
+    try {
+
+      final Properties props = createProperties(host, openPorts[0]);
+      CommandProcessor commandProcessor = new CommandProcessor();
+      Result result = commandProcessor.createCommandStatement(CliStrings.LIST_MEMBER, EMPTY_ENV).process();
+
+      getLogWriter().info("#SB" + getResultAsString(result));
+      assertEquals(true, result.getStatus().equals(Status.ERROR));
+    } finally {
+      locator.stop(); // fix for bug 46562
+    }
+  }
+
+  /***
+   * Tests list member --group=G1
+   *
+   * @throws IOException
+   * @throws ClassNotFoundException
+   */
+  public void testListMemberWithGroups() throws IOException, ClassNotFoundException {
+    setupSystem();
+    CommandProcessor commandProcessor = new CommandProcessor();
+    CommandStringBuilder csb = new CommandStringBuilder(CliStrings.LIST_MEMBER);
+    csb.addOption(CliStrings.LIST_MEMBER__GROUP, "G1");
+    Result result = commandProcessor.createCommandStatement(csb.toString(), EMPTY_ENV).process();
+    getLogWriter().info("#SB" + getResultAsString(result));
+    assertEquals(true, result.getStatus().equals(Status.OK));
+  }
+
+  /***
+   * Tests the "describe member" command for all the members in the DS
+   *
+   * @throws IOException
+   * @throws ClassNotFoundException
+   */
+  public void testDescribeMember() throws IOException, ClassNotFoundException {
+    setupSystem();
+    CommandProcessor commandProcessor = new CommandProcessor();
+    GemFireCacheImpl cache = (GemFireCacheImpl) CacheFactory.getAnyInstance();
+    Set<DistributedMember> members = cache.getDistributedSystem().getAllOtherMembers();
+
+    Iterator<DistributedMember> iters = members.iterator();
+
+    while (iters.hasNext()) {
+      DistributedMember member = iters.next();
+      Result result = commandProcessor.createCommandStatement("describe member --name=" + member.getId(),
+          EMPTY_ENV).process();
+      assertEquals(true, result.getStatus().equals(Status.OK));
+      getLogWriter().info("#SB" + getResultAsString(result));
+      //assertEquals(true, result.getStatus().equals(Status.OK));
+    }
+  }
+
+  private String getResultAsString(Result result) {
+    StringBuilder sb = new StringBuilder();
+    while (result.hasNextLine()) {
+      sb.append(result.nextLine());
+    }
+    return sb.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/eddef322/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java
new file mode 100644
index 0000000..ca3f94d
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsDUnitTest.java
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheClosedException;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.distributed.internal.DistributionConfig;
+import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
+import com.gemstone.gemfire.internal.lang.ThreadUtils;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.cli.Result.Status;
+import com.gemstone.gemfire.management.internal.cli.HeadlessGfsh;
+import com.gemstone.gemfire.management.internal.cli.i18n.CliStrings;
+import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
+import com.gemstone.gemfire.management.internal.cli.result.CompositeResultData;
+import com.gemstone.gemfire.management.internal.cli.result.CompositeResultData.SectionResultData;
+import com.gemstone.gemfire.management.internal.cli.result.ResultBuilder;
+import com.gemstone.gemfire.management.internal.cli.result.ResultData;
+import com.gemstone.gemfire.management.internal.cli.result.TabularResultData;
+import dunit.Host;
+import dunit.SerializableCallable;
+import dunit.SerializableRunnable;
+import dunit.VM;
+import org.junit.Ignore;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * Dunit class for testing gemfire function commands : GC, Shutdown
+ *
+ * @author apande
+ */
+public class MiscellaneousCommandsDUnitTest extends CliCommandTestBase {
+
+  private static final long serialVersionUID = 1L;
+  private static String cachedLogLevel;
+
+  public MiscellaneousCommandsDUnitTest(String name) {
+    super(name);
+  }
+
+  @Override
+  public void tearDown2() throws Exception {
+    invokeInEveryVM(new SerializableRunnable("reset log level") {
+      public void run() {
+        if (cachedLogLevel != null) {
+          System.setProperty("gemfire.log-level", cachedLogLevel);
+          cachedLogLevel = null;
+        }
+      }
+    });
+  }
+
+  public void testGCForGroup() {
+    Properties localProps = new Properties();
+    localProps.setProperty(DistributionConfig.NAME_NAME, "Manager");
+    localProps.setProperty(DistributionConfig.GROUPS_NAME, "Group1");
+    createDefaultSetup(localProps);
+    String command = "gc --group=Group1";
+    CommandResult cmdResult = executeCommand(command);
+    cmdResult.resetToFirstLine();
+    if (cmdResult != null) {
+      String cmdResultStr = commandResultToString(cmdResult);
+      getLogWriter().info("testGCForGroup cmdResultStr=" + cmdResultStr + "; cmdResult=" + cmdResult);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+      if (cmdResult.getType().equals(ResultData.TYPE_TABULAR)) {
+        TabularResultData table = (TabularResultData) cmdResult.getResultData();
+        List<String> memberNames = table.retrieveAllValues(CliStrings.GC__MSG__MEMBER_NAME);
+        assertEquals(true, memberNames.size() == 1 ? true : false);
+      } else {
+        fail("testGCForGroup failed as CommandResult should be table type");
+      }
+    } else {
+      fail("testGCForGroup failed as did not get CommandResult");
+    }
+  }
+
+  public static String getMemberId() {
+    Cache cache = new GemfireDataCommandsDUnitTest("test").getCache();
+    return cache.getDistributedSystem().getDistributedMember().getId();
+  }
+
+  public void testGCForMemberID() {
+    createDefaultSetup(null);
+    final VM vm1 = Host.getHost(0).getVM(1);
+    final String vm1MemberId = (String) vm1.invoke(MiscellaneousCommandsDUnitTest.class, "getMemberId");
+    String command = "gc --member=" + vm1MemberId;
+    CommandResult cmdResult = executeCommand(command);
+    cmdResult.resetToFirstLine();
+    if (cmdResult != null) {
+      String cmdResultStr = commandResultToString(cmdResult);
+      getLogWriter().info("testGCForMemberID cmdResultStr=" + cmdResultStr);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+      if (cmdResult.getType().equals(ResultData.TYPE_TABULAR)) {
+        TabularResultData table = (TabularResultData) cmdResult.getResultData();
+        List<String> memberNames = table.retrieveAllValues(CliStrings.GC__MSG__MEMBER_NAME);
+        assertEquals(true, memberNames.size() == 1 ? true : false);
+      } else {
+        fail("testGCForGroup failed as CommandResult should be table type");
+      }
+    } else {
+      fail("testGCForCluster failed as did not get CommandResult");
+    }
+  }
+
+  public void testShowLogDefault() throws IOException {
+    Properties props = new Properties();
+    try {
+      props.setProperty("log-file", "testShowLogDefault.log");
+      createDefaultSetup(props);
+      final VM vm1 = Host.getHost(0).getVM(0);
+      final String vm1MemberId = (String) vm1.invoke(MiscellaneousCommandsDUnitTest.class, "getMemberId");
+      String command = "show log --member=" + vm1MemberId;
+      CommandResult cmdResult = executeCommand(command);
+      if (cmdResult != null) {
+        String log = commandResultToString(cmdResult);
+        assertNotNull(log);
+        getLogWriter().info("Show Log is" + log);
+        assertEquals(Result.Status.OK, cmdResult.getStatus());
+      } else {
+        fail("testShowLog failed as did not get CommandResult");
+      }
+    } finally {
+      disconnectAllFromDS();
+    }
+  }
+
+  public void testShowLogNumLines() {
+    Properties props = new Properties();
+    props.setProperty("log-file", "testShowLogNumLines.log");
+    try {
+      createDefaultSetup(props);
+      final VM vm1 = Host.getHost(0).getVM(0);
+      final String vm1MemberId = (String) vm1.invoke(MiscellaneousCommandsDUnitTest.class, "getMemberId");
+      String command = "show log --member=" + vm1MemberId + " --lines=50";
+      CommandResult cmdResult = executeCommand(command);
+      if (cmdResult != null) {
+        String log = commandResultToString(cmdResult);
+        assertNotNull(log);
+        getLogWriter().info("Show Log is" + log);
+        assertEquals(Result.Status.OK, cmdResult.getStatus());
+      } else {
+        fail("testShowLog failed as did not get CommandResult");
+      }
+    } finally {
+      disconnectAllFromDS();
+    }
+  }
+
+  public void testGCForEntireCluster() {
+    setupForGC();
+    String command = "gc";
+    CommandResult cmdResult = executeCommand(command);
+    cmdResult.resetToFirstLine();
+    if (cmdResult != null) {
+      String cmdResultStr = commandResultToString(cmdResult);
+      getLogWriter().info("testGCForEntireCluster cmdResultStr=" + cmdResultStr + "; cmdResult=" + cmdResult);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+      if (cmdResult.getType().equals(ResultData.TYPE_TABULAR)) {
+        TabularResultData table = (TabularResultData) cmdResult.getResultData();
+        List<String> memberNames = table.retrieveAllValues(CliStrings.GC__MSG__MEMBER_NAME);
+        assertEquals(3, memberNames.size());
+      } else {
+        fail("testGCForGroup failed as CommandResult should be table type");
+      }
+    } else {
+      fail("testGCForGroup failed as did not get CommandResult");
+    }
+  }
+
+  void setupForGC() {
+    disconnectAllFromDS();
+
+    final VM vm1 = Host.getHost(0).getVM(1);
+    final VM vm2 = Host.getHost(0).getVM(2);
+
+
+    createDefaultSetup(null);
+    vm1.invoke(new SerializableRunnable() {
+      public void run() {
+        // no need to close cache as it will be closed as part of teardown2
+        Cache cache = getCache();
+
+        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+        Region region = dataRegionFactory.create("testRegion");
+        for (int i = 0; i < 10; i++) {
+          region.put("key" + (i + 200), "value" + (i + 200));
+        }
+      }
+    });
+    vm2.invoke(new SerializableRunnable() {
+      public void run() {
+        // no need to close cache as it will be closed as part of teardown2
+        Cache cache = getCache();
+
+        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+        dataRegionFactory.create("testRegion");
+      }
+    });
+  }
+
+  public void testShutDownWithoutTimeout() {
+
+    addExpectedException("EntryDestroyedException");
+
+    setupForShutDown();
+    ThreadUtils.sleep(2500);
+
+    String command = "shutdown";
+    CommandResult cmdResult = executeCommand(command);
+
+    if (cmdResult != null) {
+      String cmdResultStr = commandResultToString(cmdResult);
+      getLogWriter().info("testShutDownWithoutTimeout cmdResultStr=" + cmdResultStr);
+    }
+
+    verifyShutDown();
+
+    final HeadlessGfsh defaultShell = getDefaultShell();
+
+    // Need for the Gfsh HTTP enablement during shutdown to properly assess the
+    // state of the connection.
+    waitForCriterion(new WaitCriterion() {
+      public boolean done() {
+        return !defaultShell.isConnectedAndReady();
+      }
+
+      public String description() {
+        return "Waits for the shell to disconnect!";
+      }
+    }, 1000, 250, true);
+
+    assertFalse(defaultShell.isConnectedAndReady());
+  }
+
+  @Ignore("Disabled for 52350")
+  public void DISABLED_testShutDownWithTimeout() {
+    setupForShutDown();
+    ThreadUtils.sleep(2500);
+
+    addExpectedException("EntryDestroyedException");
+
+    String command = "shutdown --time-out=15";
+    CommandResult cmdResult = executeCommand(command);
+
+    if (cmdResult != null) {
+      String cmdResultStr = commandResultToString(cmdResult);
+      getLogWriter().info("testShutDownWithTIMEOUT cmdResultStr=" + cmdResultStr);
+    }
+
+    verifyShutDown();
+
+    final HeadlessGfsh defaultShell = getDefaultShell();
+
+    // Need for the Gfsh HTTP enablement during shutdown to properly assess the state of the connection.
+    waitForCriterion(new WaitCriterion() {
+      public boolean done() {
+        return !defaultShell.isConnectedAndReady();
+      }
+
+      public String description() {
+        return "Waits for the shell to disconnect!";
+      }
+    }, 1000, 250, false);
+
+    assertFalse(defaultShell.isConnectedAndReady());
+  }
+
+  public void testShutDownForTIMEOUT() {
+    setupForShutDown();
+    ThreadUtils.sleep(2500);
+    final VM vm0 = Host.getHost(0).getVM(0);
+    vm0.invoke(new SerializableRunnable() {
+      public void run() {
+        System.setProperty("ThrowTimeoutException", "true");
+      }
+    });
+
+
+    String command = "shutdown --time-out=15";
+    CommandResult cmdResult = executeCommand(command);
+
+    if (cmdResult != null) {
+      String cmdResultStr = commandResultToString(cmdResult);
+      getLogWriter().info("testShutDownForTIMEOUT cmdResultStr = " + cmdResultStr);
+      CommandResult result = (CommandResult) ResultBuilder.createInfoResult(CliStrings.SHUTDOWN_TIMEDOUT);
+      String expectedResult = commandResultToString(result);
+      assertEquals(expectedResult, cmdResultStr);
+    }
+    vm0.invoke(new SerializableRunnable() {
+      public void run() {
+        System.clearProperty("ThrowTimeoutException");
+      }
+    });
+  }
+
+  void setupForChangeLogLelvel() {
+    final VM vm0 = Host.getHost(0).getVM(0);
+    final VM vm1 = Host.getHost(0).getVM(1);
+
+    createDefaultSetup(null);
+    vm1.invoke(new SerializableRunnable() {
+      public void run() {
+        // no need to close cache as it will be closed as part of teardown2
+        Cache cache = getCache();
+
+        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+        Region region = dataRegionFactory.create("testRegion");
+        for (int i = 0; i < 10; i++) {
+          region.put("key" + (i + 200), "value" + (i + 200));
+        }
+      }
+    });
+  }
+
+  void setupForShutDown() {
+    final VM vm0 = Host.getHost(0).getVM(0);
+    final VM vm1 = Host.getHost(0).getVM(1);
+
+    System.setProperty(CliStrings.IGNORE_INTERCEPTORS, "true");
+    createDefaultSetup(null);
+    vm1.invoke(new SerializableRunnable() {
+      public void run() {
+        // no need to close cache as it will be closed as part of teardown2
+        Cache cache = getCache();
+
+        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+        Region region = dataRegionFactory.create("testRegion");
+        for (int i = 0; i < 10; i++) {
+          region.put("key" + (i + 200), "value" + (i + 200));
+        }
+      }
+    });
+  }
+
+  void verifyShutDown() {
+    final VM vm0 = Host.getHost(0).getVM(0);
+    final VM vm1 = Host.getHost(0).getVM(1);
+
+    @SuppressWarnings("serial") final SerializableCallable connectedChecker = new SerializableCallable() {
+      @Override
+      public Object call() throws Exception {
+        boolean cacheExists = true;
+        try {
+          Cache cacheInstance = CacheFactory.getAnyInstance();
+          cacheExists = cacheInstance.getDistributedSystem().isConnected();
+        } catch (CacheClosedException e) {
+          cacheExists = false;
+        }
+        return cacheExists;
+      }
+    };
+
+    WaitCriterion waitCriterion = new WaitCriterion() {
+      @Override
+      public boolean done() {
+        return Boolean.FALSE.equals(vm0.invoke(connectedChecker)) && Boolean.FALSE.equals(vm1.invoke(connectedChecker));
+      }
+
+      @Override
+      public String description() {
+        return "Wait for gfsh to get disconnected from Manager.";
+      }
+    };
+    waitForCriterion(waitCriterion, 5000, 200, true);
+
+    assertTrue(Boolean.FALSE.equals(vm1.invoke(connectedChecker)));
+    assertTrue(Boolean.FALSE.equals(vm0.invoke(connectedChecker)));
+  }
+
+  public void testChangeLogLevelForMembers() {
+    final VM vm0 = Host.getHost(0).getVM(0);
+    final VM vm1 = Host.getHost(0).getVM(1);
+
+    setupForChangeLogLelvel();
+
+    String serverName1 = (String) vm0.invoke(new SerializableCallable() {
+      @Override
+      public Object call() throws Exception {
+        cachedLogLevel = System.getProperty("gemfire.log-level");
+        return GemFireCacheImpl.getInstance().getDistributedSystem().getDistributedMember().getId();
+      }
+    });
+
+    String serverName2 = (String) vm1.invoke(new SerializableCallable() {
+      @Override
+      public Object call() throws Exception {
+        cachedLogLevel = System.getProperty("gemfire.log-level");
+        return GemFireCacheImpl.getInstance().getDistributedSystem().getDistributedMember().getId();
+      }
+    });
+
+    String commandString = CliStrings.CHANGE_LOGLEVEL + " --" + CliStrings.CHANGE_LOGLEVEL__LOGLEVEL + "=finer" + " --" + CliStrings.CHANGE_LOGLEVEL__MEMBER + "=" + serverName1 + "," + serverName2;
+
+    CommandResult commandResult = executeCommand(commandString);
+    getLogWriter().info("testChangeLogLevel commandResult=" + commandResult);
+    assertTrue(Status.OK.equals(commandResult.getStatus()));
+    CompositeResultData resultData = (CompositeResultData) commandResult.getResultData();
+    SectionResultData section = resultData.retrieveSection("section");
+    assertNotNull(section);
+    TabularResultData tableRsultData = section.retrieveTable("ChangeLogLevel");
+    assertNotNull(tableRsultData);
+
+    List<String> columns = tableRsultData.retrieveAllValues(CliStrings.CHANGE_LOGLEVEL__COLUMN_MEMBER);
+    List<String> status = tableRsultData.retrieveAllValues(CliStrings.CHANGE_LOGLEVEL__COLUMN_STATUS);
+
+    assertEquals(columns.size(), 2);
+    assertEquals(status.size(), 2);
+
+    assertTrue(columns.contains(serverName1));
+    assertTrue(columns.contains(serverName2));
+    assertTrue(status.contains("true"));
+  }
+
+  public void testChangeLogLevelForGrps() {
+    Properties localProps = new Properties();
+    localProps.setProperty(DistributionConfig.NAME_NAME, "Manager");
+    localProps.setProperty(DistributionConfig.GROUPS_NAME, "Group0");
+
+    final VM vm1 = Host.getHost(0).getVM(1);
+    final VM vm2 = Host.getHost(0).getVM(2);
+    final String grp1 = "Group1";
+    final String grp2 = "Group2";
+
+    createDefaultSetup(localProps);
+
+    String vm1id = (String) vm1.invoke(new SerializableCallable() {
+      @Override
+      public Object call() throws Exception {
+        Properties localProps = new Properties();
+        localProps.setProperty(DistributionConfig.GROUPS_NAME, grp1);
+        getSystem(localProps);
+        Cache cache = getCache();
+        return cache.getDistributedSystem().getDistributedMember().getId();
+      }
+    });
+
+    String vm2id = (String) vm2.invoke(new SerializableCallable() {
+      @Override
+      public Object call() throws Exception {
+        Properties localProps = new Properties();
+        localProps.setProperty(DistributionConfig.GROUPS_NAME, grp2);
+        getSystem(localProps);
+        Cache cache = getCache();
+        return cache.getDistributedSystem().getDistributedMember().getId();
+      }
+    });
+
+    String commandString = CliStrings.CHANGE_LOGLEVEL + " --" + CliStrings.CHANGE_LOGLEVEL__LOGLEVEL + "=finer" + " --" + CliStrings.CHANGE_LOGLEVEL__GROUPS + "=" + grp1 + "," + grp2;
+
+    CommandResult commandResult = executeCommand(commandString);
+    getLogWriter().info("testChangeLogLevelForGrps commandResult=" + commandResult);
+
+    assertTrue(Status.OK.equals(commandResult.getStatus()));
+
+    CompositeResultData resultData = (CompositeResultData) commandResult.getResultData();
+    SectionResultData section = resultData.retrieveSection("section");
+    assertNotNull(section);
+    TabularResultData tableRsultData = section.retrieveTable("ChangeLogLevel");
+    assertNotNull(tableRsultData);
+
+    List<String> columns = tableRsultData.retrieveAllValues(CliStrings.CHANGE_LOGLEVEL__COLUMN_MEMBER);
+    List<String> status = tableRsultData.retrieveAllValues(CliStrings.CHANGE_LOGLEVEL__COLUMN_STATUS);
+
+    assertEquals(columns.size(), 2);
+    assertEquals(status.size(), 2);
+
+    assertTrue(columns.contains(vm1id));
+    assertTrue(columns.contains(vm2id));
+    assertTrue(status.contains("true"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/eddef322/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
new file mode 100644
index 0000000..6afa7ee
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart1DUnitTest.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.internal.FileUtil;
+import com.gemstone.gemfire.internal.logging.LogWriterImpl;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
+import dunit.Host;
+import dunit.SerializableRunnable;
+import dunit.VM;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * Dunit class for testing gemfire function commands : export logs
+ *
+ * @author apande
+ */
+
public class MiscellaneousCommandsExportLogsPart1DUnitTest extends CliCommandTestBase {

  private static final long serialVersionUID = 1L;

  public MiscellaneousCommandsExportLogsPart1DUnitTest(String name) {
    super(name);
  }

  /**
   * Returns the distributed-member id of the cache in the calling VM.
   * Invoked remotely via {@code VM.invoke}.
   */
  public static String getMemberId() {
    Cache cache = new GemfireDataCommandsDUnitTest("test").getCache();
    return cache.getDistributedSystem().getDistributedMember().getId();
  }

  // Starts the default manager setup and has VM1 host a partitioned region
  // with a few entries, so there is log output worth exporting.
  void setupForExportLogs() {
    final VM vm1 = Host.getHost(0).getVM(1);
    createDefaultSetup(null);

    vm1.invoke(new SerializableRunnable() {
      public void run() {
        // no need to close cache as it will be closed as part of teardown2
        Cache cache = getCache();

        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
        Region region = dataRegionFactory.create("testRegion");
        for (int i = 0; i < 5; i++) {
          region.put("key" + (i + 200), "value" + (i + 200));
        }
      }
    });
  }

  // Timestamp suffix ("_yyyy_MM_dd_HH_mm_ss_SSS_z") used to give each test
  // run a unique export directory.
  String getCurrentTimeString() {
    SimpleDateFormat sf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS_z");
    Date startDate = new Date(System.currentTimeMillis());
    String formattedStartDate = sf.format(startDate);
    return ("_" + formattedStartDate);
  }

  /**
   * Exports logs (unmerged) for a window spanning 2 minutes before to 2 hours
   * after now and expects an OK result; cleans up the export directory.
   */
  public void testExportLogs() throws IOException {
    Date startDate = new Date(System.currentTimeMillis() - 2 * 60 * 1000);
    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
    String start = sf.format(startDate);

    Date enddate = new Date(System.currentTimeMillis() + 2 * 60 * 60 * 1000);
    String end = sf.format(enddate);
    String dir = getCurrentTimeString();

    setupForExportLogs();
    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);

    MiscellaneousCommands misc = new MiscellaneousCommands();
    getCache();

    // args: dir, groups, members, logLevel, onlyLogLevel, mergeLog, start,
    // end, numOfLogFilesForTesting -- assumed from call order; TODO confirm
    // against MiscellaneousCommands.exportLogsPreprocessing.
    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogs" + dir, null, null, logLevel, false, false, start,
        end, 1);

    getLogWriter().info("testExportLogs command result =" + cmdResult);

    if (cmdResult != null) {
      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
      getLogWriter().info("testExportLogs cmdStringRsult=" + cmdStringRsult);
      assertEquals(Result.Status.OK, cmdResult.getStatus());
    } else {
      fail("testExportLogs failed as did not get CommandResult");
    }
    FileUtil.delete(new File("./testExportLogs" + dir));
  }

  /**
   * Same as {@link #testExportLogs} but with log merging enabled (the sixth
   * argument is {@code true} here instead of {@code false}).
   */
  public void testExportLogsForMerge() throws IOException {
    setupForExportLogs();
    Date startDate = new Date(System.currentTimeMillis() - 2 * 60 * 1000);
    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
    String start = sf.format(startDate);

    Date enddate = new Date(System.currentTimeMillis() + 2 * 60 * 60 * 1000);
    String end = sf.format(enddate);
    String dir = getCurrentTimeString();

    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);

    MiscellaneousCommands misc = new MiscellaneousCommands();
    getCache();

    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForMerge" + dir, null, null, logLevel, false, true,
        start, end, 1);
    getLogWriter().info("testExportLogsForMerge command=" + cmdResult);

    if (cmdResult != null) {
      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
      getLogWriter().info("testExportLogsForMerge cmdStringRsult=" + cmdStringRsult);

      assertEquals(Result.Status.OK, cmdResult.getStatus());
    } else {
      fail("testExportLogsForMerge failed as did not get CommandResult");
    }
    FileUtil.delete(new File("./testExportLogsForMerge" + dir));
  }
}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/eddef322/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java
new file mode 100644
index 0000000..6a1d86c
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart2DUnitTest.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.internal.FileUtil;
+import com.gemstone.gemfire.internal.logging.LogWriterImpl;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
+import dunit.Host;
+import dunit.SerializableRunnable;
+import dunit.VM;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * Dunit class for testing gemfire function commands : export logs
+ *
+ * @author apande
+ */
+
+public class MiscellaneousCommandsExportLogsPart2DUnitTest extends CliCommandTestBase {
+
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * Creates the test with the given JUnit test-method name.
+   */
+  public MiscellaneousCommandsExportLogsPart2DUnitTest(String name) {
+    super(name);
+  }
+
+  /**
+   * Returns the distributed-member id of the cache in the calling VM.
+   * Presumably kept for reflective {@code VM.invoke(Class, String)} calls, as
+   * done by sibling export-logs tests -- no direct caller in this class.
+   */
+  public static String getMemberId() {
+    // The GemfireDataCommandsDUnitTest instance is used only as a handle to
+    // obtain the cache already created in this VM.
+    Cache cache = new GemfireDataCommandsDUnitTest("test").getCache();
+    return cache.getDistributedSystem().getDistributedMember().getId();
+  }
+
+  // Starts the default manager setup and populates a partitioned region in
+  // VM 1 so log output exists for the export-logs commands to collect.
+  void setupForExportLogs() {
+    final VM vm1 = Host.getHost(0).getVM(1);
+    createDefaultSetup(null);
+
+    vm1.invoke(new SerializableRunnable() {
+      public void run() {
+        // no need to close cache as it will be closed as part of teardown2
+        Cache cache = getCache();
+
+        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+        Region region = dataRegionFactory.create("testRegion");
+        for (int i = 0; i < 5; i++) {
+          region.put("key" + (i + 200), "value" + (i + 200));
+        }
+      }
+    });
+  }
+
+  /**
+   * Returns a unique, filesystem-safe timestamp suffix ("_yyyy_MM_dd_...")
+   * used to keep export directories distinct across test runs.
+   */
+  String getCurrentTimeString() {
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS_z");
+    Date startDate = new Date(System.currentTimeMillis());
+    String formattedStartDate = sf.format(startDate);
+    return ("_" + formattedStartDate);
+  }
+
+  /**
+   * Tests "export logs" filtered to CONFIG level over a window from one
+   * minute in the past to one minute in the future.
+   *
+   * @throws IOException if cleanup of the export directory fails
+   */
+  public void testExportLogsForLogLevel() throws IOException {
+    setupForExportLogs();
+
+    Date startDate = new Date(System.currentTimeMillis() - 60 * 1000);
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
+    String start = sf.format(startDate);
+
+    Date enddate = new Date(System.currentTimeMillis() + 60 * 1000);
+    String end = sf.format(enddate);
+    String dir = getCurrentTimeString();
+
+    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.CONFIG_LEVEL);
+
+    MiscellaneousCommands misc = new MiscellaneousCommands();
+    // Ensure a cache exists in this (manager) VM before running the command.
+    getCache();
+
+    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForLogLevel" + dir, null, null, logLevel, false,
+        false, start, end, 1);
+
+    getLogWriter().info("testExportLogsForLogLevel command=" + cmdResult);
+
+    if (cmdResult != null) {
+      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
+      getLogWriter().info("testExportLogsForLogLevel cmdStringRsult=" + cmdStringRsult);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+    } else {
+      fail("testExportLogsForLogLevel failed as did not get CommandResult");
+    }
+    // "testExportLogsForLogLevel"+dir and "./testExportLogsForLogLevel"+dir
+    // resolve to the same path relative to the working directory.
+    FileUtil.delete(new File("testExportLogsForLogLevel" + dir));
+  }
+
+
+  /**
+   * Tests "export logs" with the only-log-level flag set to {@code true}
+   * (fifth argument -- presumably "up to log level"; confirm against
+   * MiscellaneousCommands) at SEVERE level.
+   *
+   * @throws IOException if cleanup of the export directory fails
+   */
+  public void testExportLogsForLogLevelWithUPTOLOGLEVEL() throws IOException {
+    setupForExportLogs();
+
+    Date startDate = new Date(System.currentTimeMillis() - 2 * 60 * 1000);
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
+    String start = sf.format(startDate);
+
+    Date enddate = new Date(System.currentTimeMillis() + 2 * 60 * 60 * 1000);
+    String end = sf.format(enddate);
+    String dir = getCurrentTimeString();
+
+    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.SEVERE_LEVEL);
+
+    MiscellaneousCommands misc = new MiscellaneousCommands();
+    // Ensure a cache exists in this (manager) VM before running the command.
+    getCache();
+
+    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForLogLevelWithUPTOLOGLEVEL" + dir, null, null,
+        logLevel, true, false, start, end, 1);
+
+    getLogWriter().info("testExportLogsForLogLevelWithUPTOLOGLEVEL command=" + cmdResult);
+
+    if (cmdResult != null) {
+      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
+      getLogWriter().info("testExportLogsForLogLevelWithUPTOLOGLEVEL cmdStringRsult=" + cmdStringRsult);
+
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+    } else {
+      fail("testExportLogsForLogLevelWithUPTOLOGLEVEL failed as did not get CommandResult");
+    }
+    FileUtil.delete(new File("testExportLogsForLogLevelWithUPTOLOGLEVEL" + dir));
+  }
+
+  @Override
+  public void tearDown2() throws Exception {
+    super.tearDown2();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/eddef322/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java
new file mode 100644
index 0000000..1c2933d
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart3DUnitTest.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.distributed.internal.DistributionConfig;
+import com.gemstone.gemfire.internal.FileUtil;
+import com.gemstone.gemfire.internal.logging.LogWriterImpl;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
+import dunit.Host;
+import dunit.SerializableRunnable;
+import dunit.VM;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Properties;
+
+/**
+ * Dunit class for testing gemfire function commands : export logs
+ *
+ * @author apande
+ */
+
+public class MiscellaneousCommandsExportLogsPart3DUnitTest extends CliCommandTestBase {
+
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * Creates the test with the given JUnit test-method name.
+   */
+  public MiscellaneousCommandsExportLogsPart3DUnitTest(String name) {
+    super(name);
+  }
+
+  /**
+   * Returns the distributed-member id of the cache in the calling VM; invoked
+   * reflectively via {@code VM.invoke} from testExportLogsForMember.
+   */
+  public static String getMemberId() {
+    // The GemfireDataCommandsDUnitTest instance is used only as a handle to
+    // obtain the cache already created in this VM.
+    Cache cache = new GemfireDataCommandsDUnitTest("test").getCache();
+    return cache.getDistributedSystem().getDistributedMember().getId();
+  }
+
+  // Starts the default manager setup and populates a partitioned region in
+  // VM 1 so log output exists for the export-logs commands to collect.
+  void setupForExportLogs() {
+    final VM vm1 = Host.getHost(0).getVM(1);
+    createDefaultSetup(null);
+
+    vm1.invoke(new SerializableRunnable() {
+      public void run() {
+        // no need to close cache as it will be closed as part of teardown2
+        Cache cache = getCache();
+
+        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+        Region region = dataRegionFactory.create("testRegion");
+        for (int i = 0; i < 5; i++) {
+          region.put("key" + (i + 200), "value" + (i + 200));
+        }
+      }
+    });
+  }
+
+  /**
+   * Returns a unique, filesystem-safe timestamp suffix ("_yyyy_MM_dd_...")
+   * used to keep export directories distinct across test runs.
+   */
+  String getCurrentTimeString() {
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS_z");
+    Date startDate = new Date(System.currentTimeMillis());
+    String formattedStartDate = sf.format(startDate);
+    return ("_" + formattedStartDate);
+  }
+
+  /**
+   * Tests "export logs" restricted to the server group "Group1", which the
+   * manager is placed in via the groups property.
+   *
+   * @throws IOException if cleanup of the export directory fails
+   */
+  public void testExportLogsForGroup() throws IOException {
+    Properties localProps = new Properties();
+    localProps.setProperty(DistributionConfig.NAME_NAME, "Manager");
+    localProps.setProperty(DistributionConfig.GROUPS_NAME, "Group1");
+    createDefaultSetup(localProps);
+    String dir = getCurrentTimeString();
+
+    // Window from two minutes in the past to two hours in the future.
+    Date startDate = new Date(System.currentTimeMillis() - 2 * 60 * 1000);
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
+    String start = sf.format(startDate);
+
+    Date enddate = new Date(System.currentTimeMillis() + 2 * 60 * 60 * 1000);
+    String end = sf.format(enddate);
+
+    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);
+
+    MiscellaneousCommands misc = new MiscellaneousCommands();
+    // Ensure a cache exists in this (manager) VM before running the command.
+    getCache();
+    String[] groups = new String[1];
+    groups[0] = "Group1";
+
+    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForGroup" + dir, groups, null, logLevel, false,
+        false, start, end, 1);
+
+    getLogWriter().info("testExportLogsForGroup command result =" + cmdResult);
+    if (cmdResult != null) {
+      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
+      getLogWriter().info("testExportLogsForGroup cmdStringRsult=" + cmdStringRsult);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+    } else {
+      fail("testExportLogsForGroup failed as did not get CommandResult");
+    }
+    FileUtil.delete(new File("testExportLogsForGroup" + dir));
+  }
+
+  /**
+   * Tests "export logs" restricted to a single member, identified by the
+   * member id of the cache running in VM 1.
+   *
+   * @throws IOException if cleanup of the export directory fails
+   */
+  public void testExportLogsForMember() throws IOException {
+    createDefaultSetup(null);
+
+    Date startDate = new Date(System.currentTimeMillis() - 2 * 60 * 1000);
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
+    String start = sf.format(startDate);
+
+    Date enddate = new Date(System.currentTimeMillis() + 2 * 60 * 60 * 1000);
+    String end = sf.format(enddate);
+
+    final VM vm1 = Host.getHost(0).getVM(1);
+    // Use this class's own getMemberId helper (defined above) instead of
+    // reaching into MiscellaneousCommandsDUnitTest; both return the same
+    // member id, and this removes a needless cross-test-class dependency.
+    final String vm1MemberId = (String) vm1.invoke(MiscellaneousCommandsExportLogsPart3DUnitTest.class, "getMemberId");
+    String dir = getCurrentTimeString();
+
+    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);
+
+    MiscellaneousCommands misc = new MiscellaneousCommands();
+    // Ensure a cache exists in this (manager) VM before running the command.
+    getCache();
+
+    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForMember" + dir, null, vm1MemberId, logLevel,
+        false, false, start, end, 1);
+
+    getLogWriter().info("testExportLogsForMember command result =" + cmdResult);
+
+    if (cmdResult != null) {
+      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
+      getLogWriter().info("testExportLogsForMember cmdStringRsult=" + cmdStringRsult);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+    } else {
+      fail("testExportLogsForMember failed as did not get CommandResult");
+    }
+    FileUtil.delete(new File("testExportLogsForMember" + dir));
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/eddef322/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java
new file mode 100644
index 0000000..da12c6e
--- /dev/null
+++ b/gemfire-core/src/test/java/com/gemstone/gemfire/management/internal/cli/commands/MiscellaneousCommandsExportLogsPart4DUnitTest.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.gemstone.gemfire.management.internal.cli.commands;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.internal.FileUtil;
+import com.gemstone.gemfire.internal.logging.LogWriterImpl;
+import com.gemstone.gemfire.management.cli.Result;
+import com.gemstone.gemfire.management.internal.cli.result.CommandResult;
+import dunit.Host;
+import dunit.SerializableRunnable;
+import dunit.VM;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+/**
+ * Dunit class for testing gemfire function commands : export logs
+ *
+ * @author apande
+ */
+public class MiscellaneousCommandsExportLogsPart4DUnitTest extends CliCommandTestBase {
+
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * Creates the test with the given JUnit test-method name.
+   */
+  public MiscellaneousCommandsExportLogsPart4DUnitTest(String name) {
+    super(name);
+  }
+
+  /**
+   * Returns the distributed-member id of the cache in the calling VM.
+   * Presumably kept for reflective {@code VM.invoke(Class, String)} calls, as
+   * done by sibling export-logs tests -- no direct caller in this class.
+   */
+  public static String getMemberId() {
+    // The GemfireDataCommandsDUnitTest instance is used only as a handle to
+    // obtain the cache already created in this VM.
+    Cache cache = new GemfireDataCommandsDUnitTest("test").getCache();
+    return cache.getDistributedSystem().getDistributedMember().getId();
+  }
+
+  @Override
+  public void tearDown2() throws Exception {
+    super.tearDown2();
+  }
+
+  // Starts the default manager setup and populates a partitioned region in
+  // VM 1 so log output exists for the export-logs commands to collect.
+  void setupForExportLogs() {
+    final VM vm1 = Host.getHost(0).getVM(1);
+    createDefaultSetup(null);
+
+    vm1.invoke(new SerializableRunnable() {
+      public void run() {
+        // no need to close cache as it will be closed as part of teardown2
+        Cache cache = getCache();
+
+        RegionFactory<Integer, Integer> dataRegionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+        Region region = dataRegionFactory.create("testRegion");
+        for (int i = 0; i < 5; i++) {
+          region.put("key" + (i + 200), "value" + (i + 200));
+        }
+      }
+    });
+  }
+
+  /**
+   * Returns a unique, filesystem-safe timestamp suffix ("_yyyy_MM_dd_...")
+   * used to keep export directories distinct across test runs.
+   */
+  String getCurrentTimeString() {
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS_z");
+    Date startDate = new Date(System.currentTimeMillis());
+    String formattedStartDate = sf.format(startDate);
+    return ("_" + formattedStartDate);
+  }
+
+  /**
+   * Tests "export logs" with both start and end times supplied: one minute in
+   * the past to one hour in the future.
+   *
+   * @throws IOException if cleanup of the export directory fails
+   */
+  public void testExportLogsForTimeRange1() throws IOException {
+    setupForExportLogs();
+    Date startDate = new Date(System.currentTimeMillis() - 1 * 60 * 1000);
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd");
+    String start = sf.format(startDate);
+
+    Date enddate = new Date(System.currentTimeMillis() + 1 * 60 * 60 * 1000);
+    String end = sf.format(enddate);
+    String dir = getCurrentTimeString();
+
+    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);
+
+    MiscellaneousCommands misc = new MiscellaneousCommands();
+    // Ensure a cache exists in this (manager) VM before running the command.
+    getCache();
+
+    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForTimeRange1" + dir, null, null, logLevel, false,
+        false, start, end, 1);
+
+    getLogWriter().info("testExportLogsForTimeRange1 command result =" + cmdResult);
+
+    if (cmdResult != null) {
+      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
+      getLogWriter().info("testExportLogsForTimeRange1 cmdStringRsult=" + cmdStringRsult);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+    } else {
+      fail("testExportLogsForTimeRange1 failed as did not get CommandResult");
+    }
+    // "testExportLogsForTimeRange1"+dir and "./testExportLogsForTimeRange1"+dir
+    // resolve to the same path relative to the working directory.
+    FileUtil.delete(new File("testExportLogsForTimeRange1" + dir));
+  }
+
+  /**
+   * Tests "export logs" with only a start time (30 seconds ago, to the
+   * minute) and a null end time.
+   *
+   * @throws IOException if cleanup of the export directory fails
+   */
+  public void testExportLogsForTimeRangeForOnlyStartTime() throws IOException {
+    setupForExportLogs();
+    Date date = new Date();
+    date.setTime(System.currentTimeMillis() - 30 * 1000);
+    // Finer-grained pattern than the other tests: includes hours and minutes.
+    SimpleDateFormat sf = new SimpleDateFormat("yyyy/MM/dd/HH:mm");
+    String s = sf.format(date);
+    String dir = getCurrentTimeString();
+
+    String logLevel = LogWriterImpl.levelToString(LogWriterImpl.INFO_LEVEL);
+
+    MiscellaneousCommands misc = new MiscellaneousCommands();
+    // Ensure a cache exists in this (manager) VM before running the command.
+    getCache();
+
+    Result cmdResult = misc.exportLogsPreprocessing("./testExportLogsForTimeRangeForOnlyStartTime" + dir, null, null,
+        logLevel, false, false, s, null, 1);
+
+    getLogWriter().info("testExportLogsForTimeRangeForOnlyStartTime command result =" + cmdResult);
+
+    if (cmdResult != null) {
+      String cmdStringRsult = commandResultToString((CommandResult) cmdResult);
+      getLogWriter().info("testExportLogsForTimeRangeForOnlyStartTime cmdStringRsult=" + cmdStringRsult);
+      assertEquals(Result.Status.OK, cmdResult.getStatus());
+    } else {
+      fail("testExportLogsForTimeRangeForOnlyStartTime failed as did not get CommandResult");
+    }
+    FileUtil.delete(new File("testExportLogsForTimeRangeForOnlyStartTime" + dir));
+  }
+}
\ No newline at end of file


Mime
View raw message