geode-commits mailing list archives

From: ladyva...@apache.org
Subject: [geode] 01/01: GEODE-3928: createIndex on existing region creates lucene indexes for existing data
Date: Fri, 26 Jan 2018 01:57:25 GMT
This is an automated email from the ASF dual-hosted git repository.

ladyvader pushed a commit to branch feature/GEODE-3928
in repository https://gitbox.apache.org/repos/asf/geode.git

commit 1732bb6d5aac4b994aa279a473b6020b037d931a
Author: Lynn Hughes-Godfrey <lhughesgodfrey@pivotal.io>
AuthorDate: Thu Jan 18 16:03:47 2018 -0800

    GEODE-3928: createIndex on existing region creates lucene indexes for existing data
---
 .../lucene/internal/IndexRepositoryFactory.java    | 71 +++++++++++++++-
 .../cache/lucene/internal/LuceneServiceImpl.java   | 66 +++++++++++++--
 .../lucene/internal/filesystem/FileSystem.java     |  5 +-
 .../apache/geode/cache/lucene/LuceneDUnitTest.java |  8 ++
 .../lucene/LuceneIndexCreationIntegrationTest.java | 12 +--
 .../LuceneIndexMaintenanceIntegrationTest.java     | 10 ++-
 .../geode/cache/lucene/LuceneQueriesDUnitTest.java | 97 ++++++++++++++++++++++
 .../lucene/RebalanceWithRedundancyDUnitTest.java   |  7 +-
 .../PartitionedRepositoryManagerJUnitTest.java     | 22 +++++
 .../management/LuceneManagementDUnitTest.java      |  6 +-
 10 files changed, 281 insertions(+), 23 deletions(-)
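
    For reference, the user-facing entry point exercised by the new DUnit tests below is the
    create(indexName, regionPath, reindexExistingData) overload reached by casting the index
    factory to LuceneIndexFactoryImpl. A minimal usage sketch follows; the cache variable, index
    name, region path, and "text" field are illustrative placeholders, not taken from this commit.

    import org.apache.geode.cache.Cache;
    import org.apache.geode.cache.lucene.LuceneService;
    import org.apache.geode.cache.lucene.LuceneServiceProvider;
    import org.apache.geode.cache.lucene.internal.LuceneIndexFactoryImpl;

    public class ReindexExistingRegionExample {
      // Sketch only: assumes "cache" is an open Cache whose PARTITION region
      // "/example-region" already exists and already holds data.
      public static void createIndexOnExistingRegion(Cache cache) {
        LuceneService luceneService = LuceneServiceProvider.get(cache);
        // The tests in this commit cast to LuceneIndexFactoryImpl to reach the
        // create(indexName, regionPath, reindexExistingData) overload.
        LuceneIndexFactoryImpl indexFactory =
            (LuceneIndexFactoryImpl) luceneService.createIndexFactory().addField("text");
        indexFactory.create("exampleIndex", "/example-region", true);
        // Per createLuceneIndexOnDataRegion in this commit, entries already present in the
        // local primary buckets are fed into the index repository and the bucket is marked
        // APACHE_GEODE_INDEX_COMPLETE; subsequent puts are indexed through the AEQ as before.
      }
    }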

diff --git a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java
index b825940..99ef788 100644
--- a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java
+++ b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java
@@ -15,19 +15,31 @@
 package org.apache.geode.cache.lucene.internal;
 
 import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashSet;
+import java.util.Iterator;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 
+import org.apache.geode.cache.CacheClosedException;
+import org.apache.geode.cache.EntryDestroyedException;
+import org.apache.geode.cache.Region;
 import org.apache.geode.cache.lucene.LuceneSerializer;
 import org.apache.geode.cache.lucene.internal.directory.RegionDirectory;
 import org.apache.geode.cache.lucene.internal.partition.BucketTargetingMap;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepository;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepositoryImpl;
+import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.distributed.DistributedLockService;
+import org.apache.geode.distributed.LockServiceDestroyedException;
 import org.apache.geode.internal.cache.BucketRegion;
+import org.apache.geode.internal.cache.ColocationHelper;
+import org.apache.geode.internal.cache.EntrySnapshot;
+import org.apache.geode.internal.cache.PartitionRegionConfig;
 import org.apache.geode.internal.cache.PartitionedRegion;
 import org.apache.geode.internal.cache.PartitionedRegionHelper;
 import org.apache.geode.internal.logging.LogService;
@@ -36,6 +48,7 @@ public class IndexRepositoryFactory {
 
   private static final Logger logger = LogService.getLogger();
   public static final String FILE_REGION_LOCK_FOR_BUCKET_ID = "FileRegionLockForBucketId:";
+  public static final String APACHE_GEODE_INDEX_COMPLETE = "APACHE_GEODE_INDEX_COMPLETE";
 
   public IndexRepositoryFactory() {}
 
@@ -45,6 +58,14 @@ public class IndexRepositoryFactory {
     LuceneIndexForPartitionedRegion indexForPR = (LuceneIndexForPartitionedRegion) index;
     final PartitionedRegion fileRegion = indexForPR.getFileAndChunkRegion();
 
+    // We need to ensure that all members have created the fileAndChunk region before continuing
+    Region prRoot = PartitionedRegionHelper.getPRRoot(fileRegion.getCache());
+    PartitionRegionConfig prConfig =
+        (PartitionRegionConfig) prRoot.get(fileRegion.getRegionIdentifier());
+    while (!prConfig.isColocationComplete()) {
+      prConfig = (PartitionRegionConfig) prRoot.get(fileRegion.getRegionIdentifier());
+    }
+
     BucketRegion fileAndChunkBucket = getMatchingBucket(fileRegion, bucketId);
     BucketRegion dataBucket = getMatchingBucket(userRegion, bucketId);
     boolean success = false;
@@ -77,25 +98,69 @@ public class IndexRepositoryFactory {
     }
 
     final IndexRepository repo;
+    DefaultQuery.setPdxReadSerialized(true);
     try {
-      RegionDirectory dir = new RegionDirectory(getBucketTargetingMap(fileAndChunkBucket, bucketId),
-          indexForPR.getFileSystemStats());
+      // bucketTargetingMap handles partition resolver (via bucketId as callbackArg)
+      Map bucketTargetingMap = getBucketTargetingMap(fileAndChunkBucket, bucketId);
+      RegionDirectory dir =
+          new RegionDirectory(bucketTargetingMap, indexForPR.getFileSystemStats());
       IndexWriterConfig config = new IndexWriterConfig(indexForPR.getAnalyzer());
       IndexWriter writer = new IndexWriter(dir, config);
       repo = new IndexRepositoryImpl(fileAndChunkBucket, writer, serializer,
           indexForPR.getIndexStats(), dataBucket, lockService, lockName, indexForPR);
-      success = true;
+      success = false;
+      // fileRegion ops (get/put) need bucketId as a callbackArg for PartitionResolver
+      if (null != fileRegion.get(APACHE_GEODE_INDEX_COMPLETE, bucketId)) {
+        success = true;
+        return repo;
+      } else {
+        Set<IndexRepository> affectedRepos = new HashSet<IndexRepository>();
+
+        Iterator keysIterator = dataBucket.keySet().iterator();
+        while (keysIterator.hasNext()) {
+          Object key = keysIterator.next();
+          Object value = getValue(userRegion.getEntry(key));
+          if (value != null) {
+            repo.update(key, value);
+          } else {
+            repo.delete(key);
+          }
+          affectedRepos.add(repo);
+        }
+
+        for (IndexRepository affectedRepo : affectedRepos) {
+          affectedRepo.commit();
+        }
+        // fileRegion ops (get/put) need bucketId as a callbackArg for PartitionResolver
+        fileRegion.put(APACHE_GEODE_INDEX_COMPLETE, APACHE_GEODE_INDEX_COMPLETE, bucketId);
+        success = true;
+      }
       return repo;
     } catch (IOException e) {
       logger.info("Exception thrown while constructing Lucene Index for bucket:" + bucketId
           + " for file region:" + fileAndChunkBucket.getFullPath());
       throw e;
+    } catch (CacheClosedException e) {
+      logger.info("CacheClosedException thrown while constructing Lucene Index for bucket:"
+          + bucketId + " for file region:" + fileAndChunkBucket.getFullPath());
+      throw e;
     } finally {
       if (!success) {
         lockService.unlock(lockName);
+        DefaultQuery.setPdxReadSerialized(false);
       }
     }
+  }
 
+  private Object getValue(Region.Entry entry) {
+    final EntrySnapshot es = (EntrySnapshot) entry;
+    Object value;
+    try {
+      value = es == null ? null : es.getRawValue(true);
+    } catch (EntryDestroyedException e) {
+      value = null;
+    }
+    return value;
   }
 
   private Map getBucketTargetingMap(BucketRegion region, int bucketId) {
diff --git a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneServiceImpl.java b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneServiceImpl.java
index e7813af..3894e32 100644
--- a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneServiceImpl.java
+++ b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneServiceImpl.java
@@ -15,10 +15,15 @@
 
 package org.apache.geode.cache.lucene.internal;
 
+import static org.apache.geode.cache.asyncqueue.internal.AsyncEventQueueImpl.ASYNC_EVENT_QUEUE_PREFIX;
+
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -31,12 +36,18 @@ import org.apache.logging.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.store.AlreadyClosedException;
 
+import org.apache.geode.InternalGemFireError;
+import org.apache.geode.cache.AttributesMutator;
 import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.CacheClosedException;
+import org.apache.geode.cache.EntryDestroyedException;
 import org.apache.geode.cache.EvictionAlgorithm;
 import org.apache.geode.cache.EvictionAttributes;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.RegionAttributes;
+import org.apache.geode.cache.RegionDestroyedException;
 import org.apache.geode.cache.asyncqueue.internal.AsyncEventQueueImpl;
 import org.apache.geode.cache.execute.Execution;
 import org.apache.geode.cache.execute.FunctionService;
@@ -58,15 +69,23 @@ import org.apache.geode.cache.lucene.internal.filesystem.ChunkKey;
 import org.apache.geode.cache.lucene.internal.filesystem.File;
 import org.apache.geode.cache.lucene.internal.management.LuceneServiceMBean;
 import org.apache.geode.cache.lucene.internal.management.ManagementIndexListener;
+import org.apache.geode.cache.lucene.internal.repository.IndexRepository;
 import org.apache.geode.cache.lucene.internal.results.LuceneGetPageFunction;
 import org.apache.geode.cache.lucene.internal.results.PageResults;
 import org.apache.geode.cache.lucene.internal.xml.LuceneServiceXmlGenerator;
+import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.internal.DSFIDFactory;
 import org.apache.geode.internal.DataSerializableFixedID;
+import org.apache.geode.internal.cache.AbstractRegion;
+import org.apache.geode.internal.cache.BucketNotFoundException;
+import org.apache.geode.internal.cache.BucketRegion;
 import org.apache.geode.internal.cache.CacheService;
+import org.apache.geode.internal.cache.EntrySnapshot;
 import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.internal.cache.LocalDataSet;
 import org.apache.geode.internal.cache.PartitionedRegion;
+import org.apache.geode.internal.cache.PrimaryBucketException;
 import org.apache.geode.internal.cache.RegionListener;
 import org.apache.geode.internal.cache.extension.Extensible;
 import org.apache.geode.internal.cache.xmlcache.XmlGenerator;
@@ -228,14 +247,15 @@ public class LuceneServiceImpl implements InternalLuceneService {
       regionPath = "/" + regionPath;
     }
 
+    // We must always register the index (this is where IndexAlreadyExistsException is detected)
     registerDefinedIndex(indexName, regionPath, new LuceneIndexCreationProfile(indexName,
         regionPath, fields, analyzer, fieldAnalyzers, serializer));
 
+    // If the region does not yet exist, install LuceneRegionListener and return
     PartitionedRegion region = (PartitionedRegion) cache.getRegion(regionPath);
-
-    LuceneRegionListener regionListener = new LuceneRegionListener(this, cache, indexName,
-        regionPath, fields, analyzer, fieldAnalyzers, serializer);
     if (region == null) {
+      LuceneRegionListener regionListener = new LuceneRegionListener(this, cache, indexName,
+          regionPath, fields, analyzer, fieldAnalyzers, serializer);
       cache.addRegionListener(regionListener);
       return;
     }
@@ -245,10 +265,9 @@ public class LuceneServiceImpl implements InternalLuceneService {
       throw new IllegalStateException("The lucene index must be created before region");
     }
 
-
+    // do work normally handled by LuceneRegionListener (if region already exists)
     createIndexOnExistingRegion(region, indexName, regionPath, fields, analyzer, fieldAnalyzers,
         serializer);
-
   }
 
   private void createIndexOnExistingRegion(PartitionedRegion region, String indexName,
@@ -266,6 +285,43 @@ public class LuceneServiceImpl implements InternalLuceneService {
         region.getAttributes(), analyzer, fieldAnalyzers, aeqId, serializer, fields);
 
     afterDataRegionCreated(luceneIndex);
+
+    createLuceneIndexOnDataRegion(region, luceneIndex);
+  }
+
+  protected boolean createLuceneIndexOnDataRegion(final PartitionedRegion userRegion,
+      final LuceneIndexImpl luceneIndex) {
+    try {
+      AbstractPartitionedRepositoryManager repositoryManager =
+          (AbstractPartitionedRepositoryManager) luceneIndex.getRepositoryManager();
+      if (userRegion.getDataStore() == null) {
+        return true;
+      }
+      Set<Integer> primaryBucketIds = userRegion.getDataStore().getAllLocalPrimaryBucketIds();
+      Iterator primaryBucketIterator = primaryBucketIds.iterator();
+      while (primaryBucketIterator.hasNext()) {
+        int primaryBucketId = (Integer) primaryBucketIterator.next();
+        try {
+          BucketRegion userBucket = userRegion.getDataStore().getLocalBucketById(primaryBucketId);
+          if (!userBucket.isEmpty()) {
+            repositoryManager.getRepository(primaryBucketId);
+          }
+        } catch (BucketNotFoundException | PrimaryBucketException e) {
+          logger.debug("Bucket ID : " + primaryBucketId
+              + " not found while saving to lucene index: " + e.getMessage(), e);
+        }
+      }
+      return true;
+    } catch (RegionDestroyedException e) {
+      logger.debug("Bucket not found while saving to lucene index: " + e.getMessage(), e);
+      return false;
+    } catch (CacheClosedException e) {
+      logger.debug("Unable to save to lucene index, cache has been closed", e);
+      return false;
+    } catch (AlreadyClosedException e) {
+      logger.debug("Unable to commit, the lucene index is already closed", e);
+      return false;
+    }
   }
 
   static void validateRegionAttributes(RegionAttributes attrs) {
diff --git a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/filesystem/FileSystem.java b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/filesystem/FileSystem.java
index fd8d620..25e0706 100644
--- a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/filesystem/FileSystem.java
+++ b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/filesystem/FileSystem.java
@@ -23,6 +23,7 @@ import java.util.stream.Collectors;
 
 import org.apache.logging.log4j.Logger;
 
+import org.apache.geode.cache.lucene.internal.IndexRepositoryFactory;
 import org.apache.geode.internal.logging.LogService;
 
 /**
@@ -60,7 +61,9 @@ public class FileSystem {
 
   public Collection<String> listFileNames() {
     return (Collection<String>) fileAndChunkRegion.keySet().stream()
-        .filter(entry -> (entry instanceof String)).collect(Collectors.toList());
+        .filter(entry -> ((entry instanceof String) && !((String) entry)
+            .equalsIgnoreCase(IndexRepositoryFactory.APACHE_GEODE_INDEX_COMPLETE)))
+        .collect(Collectors.toList());
   }
 
   public File createFile(final String name) throws IOException {
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
index c3f5dc2..44962f8 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneDUnitTest.java
@@ -61,6 +61,14 @@ public abstract class LuceneDUnitTest extends JUnit4CacheTestCase {
     regionTestType.createAccessor(getCache(), REGION_NAME);
   }
 
+  protected void initDataStore(RegionTestableType regionTestType) throws Exception {
+    regionTestType.createDataStore(getCache(), REGION_NAME);
+  }
+
+  protected void initAccessor(RegionTestableType regionTestType) throws Exception {
+    regionTestType.createAccessor(getCache(), REGION_NAME);
+  }
+
   protected RegionTestableType[] getListOfRegionTestTypes() {
     return new RegionTestableType[] {RegionTestableType.PARTITION,
         RegionTestableType.PARTITION_REDUNDANT, RegionTestableType.PARTITION_OVERFLOW_TO_DISK,
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexCreationIntegrationTest.java b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexCreationIntegrationTest.java
index 8d88875..e62acf5 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexCreationIntegrationTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexCreationIntegrationTest.java
@@ -25,8 +25,10 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Consumer;
 import java.util.stream.Collectors;
@@ -94,8 +96,8 @@ public class LuceneIndexCreationIntegrationTest extends LuceneIntegrationTest {
     region.put("key1", new TestObject());
     verifyIndexFinishFlushing(cache, INDEX_NAME, REGION_NAME);
     assertEquals(analyzers, index.getFieldAnalyzers());
-    assertEquals(Arrays.asList("field1"), field1Analyzer.analyzedfields);
-    assertEquals(Arrays.asList("field2"), field2Analyzer.analyzedfields);
+    assertEquals(new HashSet(Arrays.asList("field1")), field1Analyzer.analyzedfields);
+    assertEquals(new HashSet(Arrays.asList("field2")), field2Analyzer.analyzedfields);
   }
 
   @Test
@@ -171,8 +173,8 @@ public class LuceneIndexCreationIntegrationTest extends LuceneIntegrationTest {
       region.put("key1", new TestObject());
       verifyIndexFinishFlushing(cache, INDEX_NAME, REGION_NAME);
       assertEquals(analyzers, index.getFieldAnalyzers());
-      assertEquals(Arrays.asList("field1"), field1Analyzer.analyzedfields);
-      assertEquals(Arrays.asList("field2"), field2Analyzer.analyzedfields);
+      assertEquals(new HashSet(Arrays.asList("field1")), field1Analyzer.analyzedfields);
+      assertEquals(new HashSet(Arrays.asList("field2")), field2Analyzer.analyzedfields);
     } finally {
       LuceneServiceImpl.luceneIndexFactory = new LuceneIndexImplFactory();
     }
@@ -316,7 +318,7 @@ public class LuceneIndexCreationIntegrationTest extends LuceneIntegrationTest {
 
   private static class RecordingAnalyzer extends Analyzer {
 
-    private List<String> analyzedfields = new ArrayList<String>();
+    private Set<String> analyzedfields = new HashSet<String>();
 
     @Override
     protected TokenStreamComponents createComponents(final String fieldName) {
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexMaintenanceIntegrationTest.java b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexMaintenanceIntegrationTest.java
index 854bca7..9fe6f4e 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexMaintenanceIntegrationTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneIndexMaintenanceIntegrationTest.java
@@ -15,6 +15,8 @@
 package org.apache.geode.cache.lucene;
 
 import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.*;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.junit.Assert.*;
 
 import java.io.Serializable;
@@ -153,8 +155,8 @@ public class LuceneIndexMaintenanceIntegrationTest extends LuceneIntegrationTest
     assertEquals(2, results.size());
     LuceneIndexForPartitionedRegion indexForPR = (LuceneIndexForPartitionedRegion) index;
     LuceneIndexStats indexStats = indexForPR.getIndexStats();
-    assertEquals(1, indexStats.getFailedEntries());
-    assertEquals(4, indexStats.getUpdates());
+    assertEquals(2, indexStats.getFailedEntries());
+    assertEquals(8, indexStats.getUpdates());
   }
 
   @Test
@@ -177,7 +179,7 @@ public class LuceneIndexMaintenanceIntegrationTest extends LuceneIntegrationTest
     assertEquals(5, results.size());
     LuceneIndexForPartitionedRegion indexForPR = (LuceneIndexForPartitionedRegion) index;
     LuceneIndexStats indexStats = indexForPR.getIndexStats();
-    assertEquals(10, indexStats.getUpdates());
+    assertThat("indexStats.getUpdates()", indexStats.getUpdates(), greaterThanOrEqualTo(10));
   }
 
   @Test
@@ -269,7 +271,7 @@ public class LuceneIndexMaintenanceIntegrationTest extends LuceneIntegrationTest
     LuceneTestUtilities.resumeSender(cache);
     assertTrue(luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
         TimeUnit.MILLISECONDS));
-    assertEquals(4, index.getIndexStats().getCommits());
+    assertEquals(8, index.getIndexStats().getCommits());
   }
 
   @Test
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneQueriesDUnitTest.java b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneQueriesDUnitTest.java
index 08e0e05..cdcf29b 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneQueriesDUnitTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/LuceneQueriesDUnitTest.java
@@ -28,10 +28,16 @@ import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 
 import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.PartitionAttributes;
+import org.apache.geode.cache.PartitionAttributesFactory;
 import org.apache.geode.cache.Region;
+import org.apache.geode.cache.RegionShortcut;
+import org.apache.geode.cache.lucene.internal.LuceneIndexFactoryImpl;
 import org.apache.geode.cache.lucene.internal.LuceneQueryImpl;
+import org.apache.geode.cache.lucene.internal.LuceneServiceImpl;
 import org.apache.geode.cache.lucene.test.LuceneTestUtilities;
 import org.apache.geode.distributed.ConfigurationProperties;
+import org.apache.geode.test.dunit.AsyncInvocation;
 import org.apache.geode.test.dunit.SerializableRunnableIF;
 import org.apache.geode.test.dunit.VM;
 import org.apache.geode.test.junit.categories.DistributedTest;
@@ -101,6 +107,97 @@ public class LuceneQueriesDUnitTest extends LuceneQueriesAccessorBase {
     executeTextSearch(accessor);
   }
 
+  private void destroyIndex() {
+    LuceneService luceneService = LuceneServiceProvider.get(getCache());
+    luceneService.destroyIndex(INDEX_NAME, REGION_NAME);
+  }
+
+  private void recreateIndex() {
+    LuceneService luceneService = LuceneServiceProvider.get(getCache());
+    LuceneIndexFactoryImpl indexFactory =
+        (LuceneIndexFactoryImpl) luceneService.createIndexFactory().addField("text");
+    indexFactory.create(INDEX_NAME, REGION_NAME, true);
+  };
+
+  @Test
+  @Parameters(method = "getListOfRegionTestTypes")
+  public void dropAndRecreateIndex(RegionTestableType regionTestType) throws Exception {
+    SerializableRunnableIF createIndex = () -> {
+      LuceneService luceneService = LuceneServiceProvider.get(getCache());
+      luceneService.createIndexFactory().addField("text").create(INDEX_NAME, REGION_NAME);
+    };
+    dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
+    dataStore2.invoke(() -> initDataStore(createIndex, regionTestType));
+    accessor.invoke(() -> initAccessor(createIndex, regionTestType));
+
+    putDataInRegion(accessor);
+    assertTrue(waitForFlushBeforeExecuteTextSearch(accessor, 60000));
+    assertTrue(waitForFlushBeforeExecuteTextSearch(dataStore1, 60000));
+    executeTextSearch(accessor);
+
+    dataStore1.invoke(() -> destroyIndex());
+
+    // re-index stored data
+    AsyncInvocation ai1 = dataStore1.invokeAsync(() -> {
+      recreateIndex();
+    });
+
+    // re-index stored data
+    AsyncInvocation ai2 = dataStore2.invokeAsync(() -> {
+      recreateIndex();
+    });
+
+    ai1.join();
+    ai2.join();
+
+    ai1.checkException();
+    ai2.checkException();
+
+    executeTextSearch(accessor);
+  }
+
+  @Test
+  @Parameters(method = "getListOfRegionTestTypes")
+  public void reindexThenQuery(RegionTestableType regionTestType) throws Exception {
+    SerializableRunnableIF createIndex = () -> {
+      LuceneService luceneService = LuceneServiceProvider.get(getCache());
+      LuceneIndexFactoryImpl indexFactory =
+          (LuceneIndexFactoryImpl) luceneService.createIndexFactory().addField("text");
+      indexFactory.create(INDEX_NAME, REGION_NAME, true);
+    };
+
+    // Create dataRegion prior to index
+    dataStore1.invoke(() -> initDataStore(regionTestType));
+    dataStore2.invoke(() -> initDataStore(regionTestType));
+    accessor.invoke(() -> initAccessor(regionTestType));
+
+    // populate region
+    accessor.invoke(() -> {
+      Region<Object, Object> region = getCache().getRegion(REGION_NAME);
+      region.put(1, new TestObject("hello world"));
+      region.put(113, new TestObject("hi world"));
+      region.put(2, new TestObject("goodbye world"));
+    });
+
+    // re-index stored data
+    AsyncInvocation ai1 = dataStore1.invokeAsync(() -> {
+      recreateIndex();
+    });
+
+    // re-index stored data
+    AsyncInvocation ai2 = dataStore2.invokeAsync(() -> {
+      recreateIndex();
+    });
+
+    ai1.join();
+    ai2.join();
+
+    ai1.checkException();
+    ai2.checkException();
+
+    executeTextSearch(accessor);
+  }
+
   @Test
   @Parameters(method = "getListOfRegionTestTypes")
   public void defaultFieldShouldPropogateCorrectlyThroughFunction(
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/RebalanceWithRedundancyDUnitTest.java b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/RebalanceWithRedundancyDUnitTest.java
index 720dcc4..7481780 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/RebalanceWithRedundancyDUnitTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/RebalanceWithRedundancyDUnitTest.java
@@ -177,15 +177,16 @@ public class RebalanceWithRedundancyDUnitTest extends LuceneQueriesAccessorBase
     dataStore1.invoke(() -> initDataStore(createIndex, regionTestType));
     dataStore2.invoke(() -> initDataStore(createIndex, regionTestType));
     accessor.invoke(() -> initAccessor(createIndex, regionTestType));
+
     dataStore1.invoke(() -> LuceneTestUtilities.pauseSender(getCache()));
     dataStore2.invoke(() -> LuceneTestUtilities.pauseSender(getCache()));
     accessor.invoke(() -> LuceneTestUtilities.pauseSender(getCache()));
 
     putEntryInEachBucket();
 
-    dataStore1.invoke(() -> LuceneTestUtilities.resumeSender(getCache()));
-    dataStore2.invoke(() -> LuceneTestUtilities.resumeSender(getCache()));
-    accessor.invoke(() -> LuceneTestUtilities.resumeSender(getCache()));
+    dataStore1.invoke(() -> LuceneTestUtilities.resumeSender(basicGetCache()));
+    dataStore2.invoke(() -> LuceneTestUtilities.resumeSender(basicGetCache()));
+    accessor.invoke(() -> LuceneTestUtilities.resumeSender(basicGetCache()));
 
     assertTrue(waitForFlushBeforeExecuteTextSearch(dataStore2, 60000));
 
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java
index df7deca..894a193 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java
@@ -34,9 +34,14 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
 import org.apache.geode.cache.lucene.LuceneSerializer;
 import org.apache.geode.cache.lucene.internal.directory.RegionDirectory;
@@ -45,11 +50,14 @@ import org.apache.geode.cache.lucene.internal.partition.BucketTargetingMap;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepository;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepositoryImpl;
 import org.apache.geode.cache.lucene.internal.repository.serializer.HeterogeneousLuceneSerializer;
+import org.apache.geode.cache.partition.PartitionRegionHelper;
 import org.apache.geode.distributed.internal.locks.DLockService;
 import org.apache.geode.internal.cache.BucketAdvisor;
 import org.apache.geode.internal.cache.BucketNotFoundException;
 import org.apache.geode.internal.cache.BucketRegion;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.LocalRegion;
+import org.apache.geode.internal.cache.PartitionRegionConfig;
 import org.apache.geode.internal.cache.PartitionedRegion;
 import org.apache.geode.internal.cache.PartitionedRegion.RetryTimeKeeper;
 import org.apache.geode.internal.cache.PartitionedRegionDataStore;
@@ -59,6 +67,9 @@ import org.apache.geode.test.fake.Fakes;
 import org.apache.geode.test.junit.categories.UnitTest;
 
 @Category(UnitTest.class)
+@RunWith(PowerMockRunner.class)
+@PowerMockIgnore("*.UnitTest")
+@PrepareForTest({PartitionedRegionHelper.class})
 public class PartitionedRepositoryManagerJUnitTest {
 
   protected PartitionedRegion userRegion;
@@ -66,6 +77,9 @@ public class PartitionedRepositoryManagerJUnitTest {
   protected LuceneSerializer serializer;
   protected PartitionedRegionDataStore userDataStore;
   protected PartitionedRegionDataStore fileDataStore;
+  protected PartitionedRegionHelper prHelper;
+  protected PartitionRegionConfig prConfig;
+  protected LocalRegion prRoot;
 
  protected Map<Integer, BucketRegion> fileAndChunkBuckets = new HashMap<Integer, BucketRegion>();
   protected Map<Integer, BucketRegion> dataBuckets = new HashMap<Integer, BucketRegion>();
@@ -101,6 +115,8 @@ public class PartitionedRepositoryManagerJUnitTest {
     when(fileAndChunkRegion.getDataStore()).thenReturn(fileDataStore);
     when(fileAndChunkRegion.getTotalNumberOfBuckets()).thenReturn(113);
     when(fileAndChunkRegion.getFullPath()).thenReturn("FileRegion");
+    when(fileAndChunkRegion.getCache()).thenReturn(cache);
+    when(fileAndChunkRegion.getRegionIdentifier()).thenReturn("rid");
     indexStats = Mockito.mock(LuceneIndexStats.class);
     fileSystemStats = Mockito.mock(FileSystemStats.class);
     indexForPR = Mockito.mock(LuceneIndexForPartitionedRegion.class);
@@ -112,6 +128,12 @@ public class PartitionedRepositoryManagerJUnitTest {
     when(indexForPR.getAnalyzer()).thenReturn(new StandardAnalyzer());
     when(indexForPR.getCache()).thenReturn(cache);
     when(indexForPR.getRegionPath()).thenReturn("/testRegion");
+    prRoot = Mockito.mock(LocalRegion.class);
+    prConfig = Mockito.mock(PartitionRegionConfig.class);
+    when(prConfig.isColocationComplete()).thenReturn(true);
+    when(prRoot.get("rid")).thenReturn(prConfig);
+    PowerMockito.mockStatic(PartitionedRegionHelper.class);
+    PowerMockito.when(PartitionedRegionHelper.getPRRoot(cache)).thenReturn(prRoot);
     repoManager = new PartitionedRepositoryManager(indexForPR, serializer);
     repoManager.setUserRegionForRepositoryManager();
     repoManager.allowRepositoryComputation();
diff --git a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/management/LuceneManagementDUnitTest.java b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/management/LuceneManagementDUnitTest.java
index adb05fb..5193d59 100644
--- a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/management/LuceneManagementDUnitTest.java
+++ b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/management/LuceneManagementDUnitTest.java
@@ -15,6 +15,8 @@
 package org.apache.geode.cache.lucene.internal.management;
 
 import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.INDEX_NAME;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
@@ -253,8 +255,8 @@ public class LuceneManagementDUnitTest extends ManagementTestBase {
     }
 
     // Verify index metrics counts
-    assertEquals(expectedPuts, totalCommits);
-    assertEquals(expectedPuts, totalUpdates);
+    assertThat("totalCommits", totalCommits, greaterThanOrEqualTo(expectedPuts));
+    assertThat("totalUpdates", totalUpdates, greaterThanOrEqualTo(expectedPuts));
     assertEquals(expectedPuts, totalDocuments);
     assertEquals(expectedQueries, totalQueries);
     assertEquals(expectedHits, totalHits);

-- 
To stop receiving notification emails like this one, please contact
ladyvader@apache.org.
