geode-commits mailing list archives

From: ash...@apache.org
Subject: incubator-geode git commit: Add LuceneIndex persistence and recovery tests
Date: Fri, 16 Oct 2015 16:25:54 GMT
Repository: incubator-geode
Updated Branches:
  refs/heads/feature/GEODE-11 09c9342ce -> 0f7833016


Add LuceneIndex persistence and recovery tests


Project: http://git-wip-us.apache.org/repos/asf/incubator-geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-geode/commit/0f783301
Tree: http://git-wip-us.apache.org/repos/asf/incubator-geode/tree/0f783301
Diff: http://git-wip-us.apache.org/repos/asf/incubator-geode/diff/0f783301

Branch: refs/heads/feature/GEODE-11
Commit: 0f7833016b4767eafa675bdd3ea6633c0b36186e
Parents: 09c9342
Author: Ashvin Agrawal <ashvin@apache.org>
Authored: Thu Oct 15 22:39:06 2015 -0700
Committer: Ashvin Agrawal <ashvin@apache.org>
Committed: Thu Oct 15 22:40:57 2015 -0700

----------------------------------------------------------------------
 .../LuceneIndexRecoveryHAJUnitTest.java         | 182 +++++++++++++++++++
 .../internal/LuceneRebalanceJUnitTest.java      |  85 ---------
 .../LuceneFunctionReadPathDUnitTest.java        |   3 -
 .../internal/repository/serializer/Type1.java   |  23 ++-
 4 files changed, 197 insertions(+), 96 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/0f783301/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneIndexRecoveryHAJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneIndexRecoveryHAJUnitTest.java b/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneIndexRecoveryHAJUnitTest.java
new file mode 100644
index 0000000..3ee1345
--- /dev/null
+++ b/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneIndexRecoveryHAJUnitTest.java
@@ -0,0 +1,182 @@
+package com.gemstone.gemfire.cache.lucene.internal;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import com.gemstone.gemfire.cache.Cache;
+import com.gemstone.gemfire.cache.CacheFactory;
+import com.gemstone.gemfire.cache.EvictionAction;
+import com.gemstone.gemfire.cache.EvictionAlgorithm;
+import com.gemstone.gemfire.cache.PartitionAttributes;
+import com.gemstone.gemfire.cache.PartitionAttributesFactory;
+import com.gemstone.gemfire.cache.Region;
+import com.gemstone.gemfire.cache.RegionFactory;
+import com.gemstone.gemfire.cache.RegionShortcut;
+import com.gemstone.gemfire.cache.lucene.LuceneQuery;
+import com.gemstone.gemfire.cache.lucene.LuceneQueryResults;
+import com.gemstone.gemfire.cache.lucene.LuceneService;
+import com.gemstone.gemfire.cache.lucene.LuceneServiceProvider;
+import com.gemstone.gemfire.cache.lucene.internal.repository.IndexRepository;
+import com.gemstone.gemfire.cache.lucene.internal.repository.RepositoryManager;
+import com.gemstone.gemfire.cache.lucene.internal.repository.serializer.HeterogenousLuceneSerializer;
+import com.gemstone.gemfire.cache.lucene.internal.repository.serializer.Type1;
+import com.gemstone.gemfire.internal.cache.BucketNotFoundException;
+import com.gemstone.gemfire.internal.cache.EvictionAttributesImpl;
+import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
+import com.gemstone.gemfire.internal.cache.PartitionedRegion;
+import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
+
+@Category(IntegrationTest.class)
+public class LuceneIndexRecoveryHAJUnitTest {
+  private static final String INDEX = "index";
+  private static final String REGION = "indexedRegion";
+  String[] indexedFields = new String[] { "txt" };
+  HeterogenousLuceneSerializer mapper = new HeterogenousLuceneSerializer(indexedFields);
+  Analyzer analyzer = new StandardAnalyzer();
+
+  Cache cache;
+
+  @Before
+  public void setup() {
+    indexedFields = new String[] { "txt" };
+    mapper = new HeterogenousLuceneSerializer(indexedFields);
+    analyzer = new StandardAnalyzer();
+    LuceneServiceImpl.registerDataSerializables();
+
+    cache = new CacheFactory().set("mcast-port", "0").create();
+  }
+
+  @After
+  public void tearDown() {
+    Cache cache = GemFireCacheImpl.getInstance();
+    if (cache != null) {
+      cache.close();
+    }
+  }
+
+  /**
+   * On rebalance, new repository manager will be created. It will try to read fileRegion and construct index. This test
+   * simulates the same.
+   */
+  @Test
+  public void recoverRepoInANewNode() throws BucketNotFoundException, IOException {
+    PartitionAttributes<String, String> attrs = new PartitionAttributesFactory().setTotalNumBuckets(1).create();
+    RegionFactory<String, String> regionfactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+    regionfactory.setPartitionAttributes(attrs);
+
+    PartitionedRegion userRegion = (PartitionedRegion) regionfactory.create("userRegion");
+    // put an entry to create the bucket
+    userRegion.put("rebalance", "test");
+
+    PartitionedRegion fileRegion = (PartitionedRegion) regionfactory.create("fileRegion");
+    PartitionedRegion chunkRegion = (PartitionedRegion) regionfactory.create("chunkRegion");
+
+    RepositoryManager manager = new PartitionedRepositoryManager(userRegion, fileRegion, chunkRegion, mapper, analyzer);
+    IndexRepository repo = manager.getRepository(userRegion, 0, null);
+    assertNotNull(repo);
+
+    repo.create("rebalance", "test");
+    repo.commit();
+
+    // close the region to simulate bucket movement. New node will create repo using data persisted by old region
+    userRegion.close();
+
+    userRegion = (PartitionedRegion) regionfactory.create("userRegion");
+    userRegion.put("rebalance", "test");
+    manager = new PartitionedRepositoryManager(userRegion, fileRegion, chunkRegion, mapper, analyzer);
+    IndexRepository newRepo = manager.getRepository(userRegion, 0, null);
+
+    Assert.assertNotEquals(newRepo, repo);
+  }
+
+  @Test
+  public void recoverPersistentIndex() throws Exception {
+    LuceneService service = LuceneServiceProvider.get(cache);
+    service.createIndex(INDEX, REGION, Type1.fields);
+
+    RegionFactory<String, Type1> regionFactory = cache.createRegionFactory(RegionShortcut.PARTITION_PERSISTENT);
+    Region<String, Type1> userRegion = regionFactory.create(REGION);
+
+    Type1 value = new Type1("hello world", 1, 2L, 3.0, 4.0f);
+    userRegion.put("value1", value);
+    value = new Type1("test world", 1, 2L, 3.0, 4.0f);
+    userRegion.put("value2", value);
+    value = new Type1("lucene world", 1, 2L, 3.0, 4.0f);
+    userRegion.put("value3", value);
+
+    // TODO flush queue
+    TimeUnit.MILLISECONDS.sleep(500);
+
+    LuceneQuery<Integer, Type1> query = service.createLuceneQueryFactory().create(INDEX, REGION, "s:world");
+    LuceneQueryResults<Integer, Type1> results = query.search();
+    Assert.assertEquals(3, results.size());
+
+    // close the cache and all the regions
+    cache.close();
+
+    cache = new CacheFactory().set("mcast-port", "0").create();
+    service = LuceneServiceProvider.get(cache);
+    service.createIndex(INDEX, REGION, Type1.fields);
+    regionFactory = cache.createRegionFactory(RegionShortcut.PARTITION_PERSISTENT);
+    userRegion = regionFactory.create(REGION);
+
+    query = service.createLuceneQueryFactory().create(INDEX, REGION, "s:world");
+    results = query.search();
+    Assert.assertEquals(3, results.size());
+
+    String aeqId = LuceneServiceImpl.getUniqueIndexName(INDEX, REGION);
+    PartitionedRegion chunkRegion = (PartitionedRegion) cache.getRegion(aeqId + ".chunks");
+    assertNotNull(chunkRegion);
+    chunkRegion.destroyRegion();
+    PartitionedRegion fileRegion = (PartitionedRegion) cache.getRegion(aeqId + ".files");
+    assertNotNull(fileRegion);
+    fileRegion.destroyRegion();
+    userRegion.destroyRegion();
+  }
+
+  @Test
+  public void overflowRegionIndex() throws Exception {
+    String aeqId = LuceneServiceImpl.getUniqueIndexName(INDEX, REGION);
+
+    LuceneService service = LuceneServiceProvider.get(cache);
+    service.createIndex(INDEX, REGION, Type1.fields);
+
+    RegionFactory<String, Type1> regionFactory = cache.createRegionFactory(RegionShortcut.PARTITION);
+    EvictionAttributesImpl evicAttr = new EvictionAttributesImpl().setAction(EvictionAction.OVERFLOW_TO_DISK);
+    evicAttr.setAlgorithm(EvictionAlgorithm.LRU_ENTRY).setMaximum(1);
+    regionFactory.setEvictionAttributes(evicAttr);
+
+    PartitionedRegion userRegion = (PartitionedRegion) regionFactory.create(REGION);
+    Assert.assertEquals(0, userRegion.getDiskRegionStats().getNumOverflowOnDisk());
+
+    Type1 value = new Type1("hello world", 1, 2L, 3.0, 4.0f);
+    userRegion.put("value1", value);
+    value = new Type1("test world", 1, 2L, 3.0, 4.0f);
+    userRegion.put("value2", value);
+    value = new Type1("lucene world", 1, 2L, 3.0, 4.0f);
+    userRegion.put("value3", value);
+
+    // TODO flush queue
+    TimeUnit.MILLISECONDS.sleep(500);
+
+    PartitionedRegion fileRegion = (PartitionedRegion) cache.getRegion(aeqId + ".files");
+    assertNotNull(fileRegion);
+    PartitionedRegion chunkRegion = (PartitionedRegion) cache.getRegion(aeqId + ".chunks");
+    assertNotNull(chunkRegion);
+    Assert.assertTrue(0 < userRegion.getDiskRegionStats().getNumOverflowOnDisk());
+
+    LuceneQuery<Integer, Type1> query = service.createLuceneQueryFactory().create(INDEX, REGION, "s:world");
+    LuceneQueryResults<Integer, Type1> results = query.search();
+    Assert.assertEquals(3, results.size());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/0f783301/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneRebalanceJUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneRebalanceJUnitTest.java b/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneRebalanceJUnitTest.java
deleted file mode 100644
index afddb4a..0000000
--- a/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/LuceneRebalanceJUnitTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-package com.gemstone.gemfire.cache.lucene.internal;
-
-import static org.junit.Assert.*;
-
-import java.io.IOException;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import com.gemstone.gemfire.cache.Cache;
-import com.gemstone.gemfire.cache.CacheFactory;
-import com.gemstone.gemfire.cache.PartitionAttributes;
-import com.gemstone.gemfire.cache.PartitionAttributesFactory;
-import com.gemstone.gemfire.cache.RegionFactory;
-import com.gemstone.gemfire.cache.RegionShortcut;
-import com.gemstone.gemfire.cache.lucene.internal.repository.IndexRepository;
-import com.gemstone.gemfire.cache.lucene.internal.repository.RepositoryManager;
-import com.gemstone.gemfire.cache.lucene.internal.repository.serializer.HeterogenousLuceneSerializer;
-import com.gemstone.gemfire.internal.cache.BucketNotFoundException;
-import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
-import com.gemstone.gemfire.internal.cache.PartitionedRegion;
-import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
-
-@Category(IntegrationTest.class)
-public class LuceneRebalanceJUnitTest {
-  String[] indexedFields = new String[] { "txt" };
-  HeterogenousLuceneSerializer mapper = new HeterogenousLuceneSerializer(indexedFields);
-  Analyzer analyzer = new StandardAnalyzer();
-
-  @Before
-  public void setup() {
-    indexedFields = new String[] { "txt" };
-    mapper = new HeterogenousLuceneSerializer(indexedFields);
-    analyzer = new StandardAnalyzer();
-    LuceneServiceImpl.registerDataSerializables();
-  }
-  
-  @After
-  public void tearDown() {
-    Cache cache = GemFireCacheImpl.getInstance();
-    if(cache != null) {
-      cache.close();
-    }
-  }
-
-  /**
-   * Test what happens when a bucket is destroyed.
-   */
-  @Test
-  public void recoverRepoInANewNode() throws BucketNotFoundException, IOException {
-    Cache cache = new CacheFactory().set("mcast-port", "0").create();
-    PartitionAttributes<String, String> attrs = new PartitionAttributesFactory().setTotalNumBuckets(1).create();
-    RegionFactory<String, String> regionfactory = cache.createRegionFactory(RegionShortcut.PARTITION);
-    regionfactory.setPartitionAttributes(attrs);
-
-    PartitionedRegion userRegion = (PartitionedRegion) regionfactory.create("userRegion");
-    // put an entry to create the bucket
-    userRegion.put("rebalance", "test");
-
-    PartitionedRegion fileRegion = (PartitionedRegion) regionfactory.create("fileRegion");
-    PartitionedRegion chunkRegion = (PartitionedRegion) regionfactory.create("chunkRegion");
-
-    RepositoryManager manager = new PartitionedRepositoryManager(userRegion, fileRegion, chunkRegion, mapper, analyzer);
-    IndexRepository repo = manager.getRepository(userRegion, 0, null);
-    assertNotNull(repo);
-
-    repo.create("rebalance", "test");
-    repo.commit();
-
-    // close the region to simulate bucket movement. New node will create repo using data persisted by old region
-    userRegion.close();
-
-    userRegion = (PartitionedRegion) regionfactory.create("userRegion");
-    userRegion.put("rebalance", "test");
-    manager = new PartitionedRepositoryManager(userRegion, fileRegion, chunkRegion, mapper, analyzer);
-    IndexRepository newRepo = manager.getRepository(userRegion, 0, null);
-
-    Assert.assertNotEquals(newRepo, repo);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/0f783301/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/distributed/LuceneFunctionReadPathDUnitTest.java
----------------------------------------------------------------------
diff --git a/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/distributed/LuceneFunctionReadPathDUnitTest.java b/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/distributed/LuceneFunctionReadPathDUnitTest.java
index e5c1a4e..0bf1842 100644
--- a/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/distributed/LuceneFunctionReadPathDUnitTest.java
+++ b/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/distributed/LuceneFunctionReadPathDUnitTest.java
@@ -179,9 +179,6 @@ public class LuceneFunctionReadPathDUnitTest extends CacheTestCase {
         return false;
       return true;
     }
-
-    
-    
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/0f783301/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/repository/serializer/Type1.java
----------------------------------------------------------------------
diff --git a/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/repository/serializer/Type1.java b/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/repository/serializer/Type1.java
index c5d2a5b..b82d0be 100644
--- a/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/repository/serializer/Type1.java
+++ b/gemfire-lucene/src/test/java/com/gemstone/gemfire/cache/lucene/internal/repository/serializer/Type1.java
@@ -1,18 +1,25 @@
 package com.gemstone.gemfire.cache.lucene.internal.repository.serializer;
 
+import java.io.Serializable;
+
 /**
  * A test type to get mapped to a lucene document
  */
-public class Type1 {
-  private String s;
-  private int i;
-  private long l;
-  private double d;
-  private float f;
-  private Object o = new Object();
+public class Type1 implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  public static final String[] fields = new String[] {"s", "i", "l", "d", "f"};
+  
+  String s;
+  int i;
+  long l;
+  double d;
+  float f;
+  Serializable o = new Serializable() {
+    private static final long serialVersionUID = 1L;
+  };
   
   public Type1(String s, int i, long l, double d, float f) {
-    super();
     this.s = s;
     this.i = i;
     this.l = l;

