geode-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From n...@apache.org
Subject geode git commit: GEODE-2530: Added DUnit tests to test Lucene pagination in persistent PR.
Date Fri, 24 Feb 2017 01:12:52 GMT
Repository: geode
Updated Branches:
  refs/heads/develop 42f733fc3 -> c35f442c2


GEODE-2530: Added DUnit tests to test Lucene pagination in persistent PR.

	* when the only dataStore goes offline while paginating results
	* when one dataStore goes offline while one is still operating while paginating
	* alternatively shutting and starting dataStores while paginating results


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/c35f442c
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/c35f442c
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/c35f442c

Branch: refs/heads/develop
Commit: c35f442c217eb867f8550890b6809dfb90a92344
Parents: 42f733f
Author: nabarun <nnag@pivotal.io>
Authored: Thu Feb 23 14:10:37 2017 -0800
Committer: nabarun <nnag@pivotal.io>
Committed: Thu Feb 23 16:35:11 2017 -0800

----------------------------------------------------------------------
 .../geode/cache/lucene/LuceneDUnitTest.java     |   2 +
 .../geode/cache/lucene/PaginationDUnitTest.java | 207 +++++++++++++++++++
 2 files changed, 209 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/geode/blob/c35f442c/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
index 6d8278e..db48294 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
@@ -75,6 +75,8 @@ public abstract class LuceneDUnitTest extends JUnit4CacheTestCase {
 
   public enum RegionTestableType {
     PARTITION(RegionShortcut.PARTITION_PROXY, RegionShortcut.PARTITION),
+    PARTITION_REDUNDANT_PERSISTENT(RegionShortcut.PARTITION_PROXY_REDUNDANT,
+        RegionShortcut.PARTITION_REDUNDANT_PERSISTENT),
     PARTITION_PERSISTENT(RegionShortcut.PARTITION_PROXY, RegionShortcut.PARTITION_PERSISTENT),
     PARTITION_REDUNDANT(RegionShortcut.PARTITION_PROXY_REDUNDANT,
         RegionShortcut.PARTITION_REDUNDANT),

http://git-wip-us.apache.org/repos/asf/geode/blob/c35f442c/geode-lucene/src/test/java/org/apache/geode/cache/lucene/PaginationDUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/PaginationDUnitTest.java
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/PaginationDUnitTest.java
new file mode 100644
index 0000000..cfde4f2
--- /dev/null
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/PaginationDUnitTest.java
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.geode.cache.lucene;
+
+import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.INDEX_NAME;
+import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.REGION_NAME;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.Region;
+import org.apache.geode.test.dunit.Assert;
+import org.apache.geode.test.dunit.SerializableRunnableIF;
+import org.apache.geode.test.junit.categories.DistributedTest;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import junitparams.JUnitParamsRunner;
+import junitparams.Parameters;
+
+@Category(DistributedTest.class)
+@RunWith(JUnitParamsRunner.class)
+public class PaginationDUnitTest extends LuceneQueriesAccessorBase {
+  protected final static int PAGE_SIZE = 2;
+  protected final static int FLUSH_WAIT_TIME_MS = 60000;
+
+  @Override
+  protected RegionTestableType[] getListOfRegionTestTypes() {
+    return new RegionTestableType[] {RegionTestableType.PARTITION_REDUNDANT_PERSISTENT};
+  }
+
+  protected void putEntryInEachBucket() {
+    accessor.invoke(() -> {
+      final Cache cache = getCache();
+      Region<Object, Object> region = cache.getRegion(REGION_NAME);
+      IntStream.range(0, NUM_BUCKETS).forEach(i -> region.put(i, new TestObject("hello
world")));
+    });
+  }
+
+  @Test
+  @Parameters(method = "getListOfRegionTestTypes")
+  public void noSuchElementExceptionWhenAllDataStoreAreClosedWhilePagination(
+      RegionTestableType regionTestType) {
+    SerializableRunnableIF createIndex = () -> {
+      LuceneService luceneService = LuceneServiceProvider.get(getCache());
+      luceneService.createIndex(INDEX_NAME, REGION_NAME, "text");
+    };
+
+
+    dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
+    accessor.invoke(() -> initAccessor(createIndex, regionTestType));
+
+    putEntryInEachBucket();
+
+    assertTrue(waitForFlushBeforeExecuteTextSearch(dataStore1, FLUSH_WAIT_TIME_MS));
+
+    accessor.invoke(() -> {
+      Cache cache = getCache();
+      LuceneService service = LuceneServiceProvider.get(cache);
+      LuceneQuery<Integer, TestObject> query;
+      query = service.createLuceneQueryFactory().setResultLimit(1000).setPageSize(PAGE_SIZE)
+          .create(INDEX_NAME, REGION_NAME, "world", "text");
+      PageableLuceneQueryResults<Integer, TestObject> pages = query.findPages();
+      assertTrue(pages.hasNext());
+      List<LuceneResultStruct<Integer, TestObject>> page = pages.next();
+      assertEquals(page.size(), PAGE_SIZE, page.size());
+      dataStore1.invoke(() -> closeCache());
+      try {
+        page = pages.next();
+        fail();
+      } catch (Exception e) {
+        Assert.assertEquals(
+            "Expected Exception = java.util.NoSuchElementException but hit " + e.toString(),
true,
+            e instanceof java.util.NoSuchElementException);
+      }
+    });
+  }
+
+
+  @Test
+  @Parameters(method = "getListOfRegionTestTypes")
+  public void noExceptionWhenOneDataStoreIsClosedButOneIsStillUpWhilePagination(
+      RegionTestableType regionTestType) {
+    SerializableRunnableIF createIndex = () -> {
+      LuceneService luceneService = LuceneServiceProvider.get(getCache());
+      luceneService.createIndex(INDEX_NAME, REGION_NAME, "text");
+    };
+
+
+    dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
+    dataStore2.invoke(() -> initDataStore(createIndex, regionTestType));
+    accessor.invoke(() -> initAccessor(createIndex, regionTestType));
+
+    putEntryInEachBucket();
+
+    assertTrue(waitForFlushBeforeExecuteTextSearch(dataStore1, FLUSH_WAIT_TIME_MS));
+
+    accessor.invoke(() -> {
+      List<LuceneResultStruct<Integer, TestObject>> combinedResult = new ArrayList<>();
+      Cache cache = getCache();
+      LuceneService service = LuceneServiceProvider.get(cache);
+      LuceneQuery<Integer, TestObject> query;
+      query = service.createLuceneQueryFactory().setResultLimit(1000).setPageSize(PAGE_SIZE)
+          .create(INDEX_NAME, REGION_NAME, "world", "text");
+      PageableLuceneQueryResults<Integer, TestObject> pages = query.findPages();
+      assertTrue(pages.hasNext());
+      List<LuceneResultStruct<Integer, TestObject>> page = pages.next();
+      combinedResult.addAll(page);
+      assertEquals(PAGE_SIZE, page.size());
+      dataStore1.invoke(() -> closeCache());
+      for (int i = 0; i < ((NUM_BUCKETS / PAGE_SIZE) - 1); i++) {
+        page = pages.next();
+        assertEquals(PAGE_SIZE, page.size());
+        combinedResult.addAll(page);
+      }
+      validateTheCombinedResult(combinedResult);
+    });
+  }
+
+  @Test
+  @Parameters(method = "getListOfRegionTestTypes")
+  public void alternativelyCloseDataStoresAfterGettingAPageAndThenValidateTheContentsOfTheResults(
+      RegionTestableType regionTestType) {
+    SerializableRunnableIF createIndex = () -> {
+      LuceneService luceneService = LuceneServiceProvider.get(getCache());
+      luceneService.createIndex(INDEX_NAME, REGION_NAME, "text");
+    };
+
+    dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
+    dataStore2.invoke(() -> initDataStore(createIndex, regionTestType));
+    accessor.invoke(() -> initAccessor(createIndex, regionTestType));
+
+    putEntryInEachBucket();
+
+    assertTrue(waitForFlushBeforeExecuteTextSearch(dataStore1, FLUSH_WAIT_TIME_MS));
+
+    accessor.invoke(() -> {
+      List<LuceneResultStruct<Integer, TestObject>> combinedResult =
+          new ArrayList<LuceneResultStruct<Integer, TestObject>>();
+      Cache cache = getCache();
+      LuceneService service = LuceneServiceProvider.get(cache);
+      LuceneQuery<Integer, TestObject> query;
+      query = service.createLuceneQueryFactory().setResultLimit(1000).setPageSize(PAGE_SIZE)
+          .create(INDEX_NAME, REGION_NAME, "world", "text");
+      PageableLuceneQueryResults<Integer, TestObject> pages = query.findPages();
+      assertTrue(pages.hasNext());
+
+      dataStore1.invoke(() -> closeCache());
+      dataStore2.invoke(() -> closeCache());
+
+      for (int i = 0; i < (NUM_BUCKETS / PAGE_SIZE); i++) {
+        List<LuceneResultStruct<Integer, TestObject>> page;
+        if (i % 2 == 0) {
+          // Bring up dataStore2 and shutdown dataStore1
+          dataStore2.invoke(() -> initDataStore(createIndex, regionTestType));
+          assertTrue(waitForFlushBeforeExecuteTextSearch(dataStore2, FLUSH_WAIT_TIME_MS));
+          dataStore1.invoke(() -> closeCache());
+          page = pages.next();
+        } else {
+          // Bring up dataStore1 and shutdown dataStore2
+          dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
+          assertTrue(waitForFlushBeforeExecuteTextSearch(dataStore1, FLUSH_WAIT_TIME_MS));
+          dataStore2.invoke(() -> closeCache());
+          page = pages.next();
+        }
+        assertEquals(PAGE_SIZE, page.size());
+        combinedResult.addAll(page);
+      }
+      validateTheCombinedResult(combinedResult);
+    });
+  }
+
+  private void validateTheCombinedResult(
+      final List<LuceneResultStruct<Integer, TestObject>> combinedResult) {
+    Map<Integer, TestObject> resultMap = combinedResult.stream()
+        .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue()));
+    assertEquals(NUM_BUCKETS, resultMap.size());
+
+    for (int i = 0; i < NUM_BUCKETS; i++) {
+      assertEquals("The aggregate result does not contain the key = " + i, true,
+          resultMap.containsKey(i));
+      assertEquals(new TestObject("hello world"), resultMap.get(i));
+    }
+  }
+
+}
+
+


Mime
View raw message