lucene-commits mailing list archives

From: rm...@apache.org
Subject: svn commit: r1197240 [2/2] - in /lucene/dev/branches/lucene2621: ./ dev-tools/idea/lucene/contrib/ dev-tools/maven/ dev-tools/maven/solr/core/ lucene/ lucene/contrib/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/ lucene/src/java/org/apache/l...
Date: Thu, 03 Nov 2011 17:44:21 GMT
Modified: lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java (original)
+++ lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/FacetSearchParamsTest.java Thu Nov  3 17:44:17 2011
@@ -8,8 +8,8 @@ import org.apache.lucene.facet.index.par
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
-import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
-import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.facet.util.PartitionsUtils;
 
 /**
@@ -37,8 +37,8 @@ public class FacetSearchParamsTest exten
     assertEquals("unexpected default facet indexing params class", DefaultFacetIndexingParams.class.getName(), fsp.getFacetIndexingParams().getClass().getName());
     assertEquals("no facet requests should be added by default", 0, fsp.getFacetRequests().size());
     Directory dir = newDirectory();
-    new LuceneTaxonomyWriter(dir).close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(dir);
+    new DirectoryTaxonomyWriter(dir).close();
+    TaxonomyReader tr = new DirectoryTaxonomyReader(dir);
     assertEquals("unexpected partition offset for 0 categories", 1, PartitionsUtils.partitionOffset(fsp, 1, tr));
     assertEquals("unexpected partition size for 0 categories", 1, PartitionsUtils.partitionSize(fsp,tr));
     tr.close();
@@ -56,11 +56,11 @@ public class FacetSearchParamsTest exten
   public void testPartitionSizeWithCategories() throws Exception {
     FacetSearchParams fsp = new FacetSearchParams();
     Directory dir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(dir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(dir);
     tw.addCategory(new CategoryPath("a"));
     tw.commit();
     tw.close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(dir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(dir);
     assertEquals("unexpected partition offset for 1 categories", 2, PartitionsUtils.partitionOffset(fsp, 1, tr));
     assertEquals("unexpected partition size for 1 categories", 2, PartitionsUtils.partitionSize(fsp,tr));
     tr.close();

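For reference, LuceneTaxonomyWriter/LuceneTaxonomyReader are renamed in place to the directory-based classes; a minimal sketch of the renamed API as exercised by the tests above, assuming the facet module on this branch (RAMDirectory is used only for illustration):

    import org.apache.lucene.facet.taxonomy.CategoryPath;
    import org.apache.lucene.facet.taxonomy.TaxonomyReader;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class TaxonomyRenameSketch {
      public static void main(String[] args) throws Exception {
        Directory taxoDir = new RAMDirectory();
        // Writer side: only the class name changed; TaxonomyWriter calls are unchanged.
        TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
        int ordinal = tw.addCategory(new CategoryPath("Author", "Mark Twain"));
        tw.commit();
        tw.close();
        // Reader side: likewise a pure rename.
        TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
        System.out.println("size=" + tr.getSize() + ", ordinal=" + ordinal);
        tr.close();
      }
    }
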
Modified: lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/MultiIteratorsPerCLParamsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/MultiIteratorsPerCLParamsTest.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/MultiIteratorsPerCLParamsTest.java (original)
+++ lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/params/MultiIteratorsPerCLParamsTest.java Thu Nov  3 17:44:17 2011
@@ -32,8 +32,8 @@ import org.apache.lucene.facet.search.re
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
-import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
-import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.facet.util.ScoredDocIdsUtils;
 
 /**
@@ -93,7 +93,7 @@ public class MultiIteratorsPerCLParamsTe
     Directory taxoDir = newDirectory();
     populateIndex(iParams, indexDir, taxoDir);
 
-    TaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);
+    TaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
     IndexReader reader = IndexReader.open(indexDir);
 
     CategoryListCache clCache = null;
@@ -168,7 +168,7 @@ public class MultiIteratorsPerCLParamsTe
       Directory taxoDir) throws Exception {
     RandomIndexWriter writer = new RandomIndexWriter(random, indexDir, 
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
-    TaxonomyWriter taxoWriter = new LuceneTaxonomyWriter(taxoDir);
+    TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
 
     for (CategoryPath[] categories : perDocCategories) {
       writer.addDocument(new CategoryDocumentBuilder(taxoWriter, iParams)

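The indexing path picks up the same rename; a hedged sketch of pairing a taxonomy writer with category-document building, as populateIndex above does (the setCategoryPaths/build chaining follows the facet module's builder API and is illustrative, not a copy of the test):

    import java.util.Arrays;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.facet.index.CategoryDocumentBuilder;
    import org.apache.lucene.facet.taxonomy.CategoryPath;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class FacetIndexingSketch {
      public static void main(String[] args) throws Exception {
        Directory taxoDir = new RAMDirectory();
        TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
        // The builder adds category fields to the document and, as a side
        // effect, registers any new categories with the taxonomy writer.
        Document doc = new CategoryDocumentBuilder(taxoWriter)
            .setCategoryPaths(Arrays.asList(new CategoryPath("a", "b")))
            .build(new Document());
        // doc would then go to a regular IndexWriter; both writers get committed.
        taxoWriter.commit();
        taxoWriter.close();
      }
    }
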
Modified: lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java (original)
+++ lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java Thu Nov  3 17:44:17 2011
@@ -14,8 +14,8 @@ import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader.ChildrenArrays;
-import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
-import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
+import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.util.SlowRAMDirectory;
 
 /**
@@ -159,7 +159,7 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriter() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
     // is what we expect it to be.
@@ -175,7 +175,7 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriterTwice() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     // run fillTaxonomy again - this will try to add the same categories
     // again, and check that we see the same ordinal paths again, not
@@ -197,10 +197,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriterTwice2() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
-    tw = new LuceneTaxonomyWriter(indexDir);
+    tw = new DirectoryTaxonomyWriter(indexDir);
     // run fillTaxonomy again - this will try to add the same categories
     // again, and check that we see the same ordinals again, not different
     // ones, and that the number of categories hasn't grown by the new
@@ -222,7 +222,7 @@ public class TestTaxonomyCombined extend
   public void testWriterTwice3() throws Exception {
     Directory indexDir = newDirectory();
     // First, create and fill the taxonomy
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
     // Now, open the same taxonomy and add the same categories again.
@@ -231,7 +231,7 @@ public class TestTaxonomyCombined extend
    // all into memory and close its reader. The bug was that it closed
     // the reader, but forgot that it did (because it didn't set the reader
     // reference to null).
-    tw = new LuceneTaxonomyWriter(indexDir);
+    tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     // Add one new category, just to make commit() do something:
     tw.addCategory(new CategoryPath("hi"));
@@ -253,7 +253,7 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriterSimpler() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     assertEquals(1, tw.getSize()); // the root only
     // Test that adding a new top-level category works
     assertEquals(1, tw.addCategory(new CategoryPath("a")));
@@ -297,12 +297,12 @@ public class TestTaxonomyCombined extend
   @Test
   public void testRootOnly() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     // right after opening the index, it should already contain the
     // root, so have size 1:
     assertEquals(1, tw.getSize());
     tw.close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     assertEquals(1, tr.getSize());
     assertEquals(0, tr.getPath(0).length());
     assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getParent(0));
@@ -319,9 +319,9 @@ public class TestTaxonomyCombined extend
   @Test
   public void testRootOnly2() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     tw.commit();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     assertEquals(1, tr.getSize());
     assertEquals(0, tr.getPath(0).length());
     assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getParent(0));
@@ -339,10 +339,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testReaderBasic() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
 
     // test TaxonomyReader.getSize():
     assertEquals(expectedCategories.length, tr.getSize());
@@ -398,10 +398,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testReaderParent() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
 
     // check that the parent of the root ordinal is the invalid ordinal:
     assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getParent(0));
@@ -463,11 +463,11 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriterParent1() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
-    tw = new LuceneTaxonomyWriter(indexDir);
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    tw = new DirectoryTaxonomyWriter(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     
     checkWriterParent(tr, tw);
     
@@ -479,10 +479,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriterParent2() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.commit();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     
     checkWriterParent(tr, tw);
     
@@ -542,10 +542,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testReaderParentArray() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     int[] parents = tr.getParentArray();
     assertEquals(tr.getSize(), parents.length);
     for (int i=0; i<tr.getSize(); i++) {
@@ -563,10 +563,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testChildrenArrays() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     ChildrenArrays ca = tr.getChildrenArrays();
     int[] youngestChildArray = ca.getYoungestChildArray();
     assertEquals(tr.getSize(), youngestChildArray.length);
@@ -627,10 +627,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testChildrenArraysInvariants() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     ChildrenArrays ca = tr.getChildrenArrays();
     int[] youngestChildArray = ca.getYoungestChildArray();
     assertEquals(tr.getSize(), youngestChildArray.length);
@@ -707,10 +707,10 @@ public class TestTaxonomyCombined extend
   @Test
   public void testChildrenArraysGrowth() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     tw.addCategory(new CategoryPath("hi", "there"));
     tw.commit();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     ChildrenArrays ca = tr.getChildrenArrays();
     assertEquals(3, tr.getSize());
     assertEquals(3, ca.getOlderSiblingArray().length);
@@ -747,12 +747,12 @@ public class TestTaxonomyCombined extend
   public void testTaxonomyReaderRefreshRaces() throws Exception {
     // compute base child arrays - after first chunk, and after the other
     Directory indexDirBase =  newDirectory();
-    TaxonomyWriter twBase = new LuceneTaxonomyWriter(indexDirBase);
+    TaxonomyWriter twBase = new DirectoryTaxonomyWriter(indexDirBase);
     twBase.addCategory(new CategoryPath("a", "0"));
     final CategoryPath abPath = new CategoryPath("a", "b");
     twBase.addCategory(abPath);
     twBase.commit();
-    TaxonomyReader trBase = new LuceneTaxonomyReader(indexDirBase);
+    TaxonomyReader trBase = new DirectoryTaxonomyReader(indexDirBase);
 
     final ChildrenArrays ca1 = trBase.getChildrenArrays();
     
@@ -779,12 +779,12 @@ public class TestTaxonomyCombined extend
       final int abOrd, final int abYoungChildBase1, final int abYoungChildBase2, final int retry)
       throws Exception {
    SlowRAMDirectory indexDir =  new SlowRAMDirectory(-1,null); // no slowness for initialization
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     tw.addCategory(new CategoryPath("a", "0"));
     tw.addCategory(abPath);
     tw.commit();
     
-    final TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    final TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     for (int i=0; i < 1<<10; i++) { //1024 facets
       final CategoryPath cp = new CategoryPath("a", "b", Integer.toString(i));
       tw.addCategory(cp);
@@ -865,9 +865,9 @@ public class TestTaxonomyCombined extend
   @Test
   public void testSeparateReaderAndWriter() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     tw.commit();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
 
     int author = 1;
 
@@ -932,9 +932,9 @@ public class TestTaxonomyCombined extend
   @Test
   public void testSeparateReaderAndWriter2() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     tw.commit();
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
 
     // Test getOrdinal():
     CategoryPath author = new CategoryPath("Author");
@@ -968,26 +968,26 @@ public class TestTaxonomyCombined extend
   public void testWriterLock() throws Exception {
     // native fslock impl gets angry if we use it, so use RAMDirectory explicitly.
     Directory indexDir = new RAMDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     tw.addCategory(new CategoryPath("hi", "there"));
     tw.commit();
    // we deliberately do not close the writer now, and keep it open and
     // locked.
     // Verify that the writer worked:
-    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
+    TaxonomyReader tr = new DirectoryTaxonomyReader(indexDir);
     assertEquals(2, tr.getOrdinal(new CategoryPath("hi", "there")));
     // Try to open a second writer, with the first one locking the directory.
     // We expect to get a LockObtainFailedException.
     try {
-      new LuceneTaxonomyWriter(indexDir);
+      new DirectoryTaxonomyWriter(indexDir);
       fail("should have failed to write in locked directory");
     } catch (LockObtainFailedException e) {
       // this is what we expect to happen.
     }
     // Remove the lock, and now the open should succeed, and we can
     // write to the new writer.
-    LuceneTaxonomyWriter.unlock(indexDir);
-    TaxonomyWriter tw2 = new LuceneTaxonomyWriter(indexDir);
+    DirectoryTaxonomyWriter.unlock(indexDir);
+    TaxonomyWriter tw2 = new DirectoryTaxonomyWriter(indexDir);
     tw2.addCategory(new CategoryPath("hey"));
     tw2.close();
     // See that the writer indeed wrote:
@@ -1054,7 +1054,7 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriterCheckPaths() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomyCheckPaths(tw);
     // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
     // is what we expect it to be.
@@ -1073,14 +1073,14 @@ public class TestTaxonomyCombined extend
   @Test
   public void testWriterCheckPaths2() throws Exception {
     Directory indexDir = newDirectory();
-    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
+    TaxonomyWriter tw = new DirectoryTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     checkPaths(tw);
     fillTaxonomy(tw);
     checkPaths(tw);
     tw.close();
 
-    tw = new LuceneTaxonomyWriter(indexDir);
+    tw = new DirectoryTaxonomyWriter(indexDir);
     checkPaths(tw);
     fillTaxonomy(tw);
     checkPaths(tw);

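The static unlock() helper moves along with the rename; a minimal sketch of the locking behavior exercised by testWriterLock above, assuming a RAMDirectory:

    import org.apache.lucene.facet.taxonomy.CategoryPath;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.LockObtainFailedException;
    import org.apache.lucene.store.RAMDirectory;

    public class TaxonomyLockSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        TaxonomyWriter tw = new DirectoryTaxonomyWriter(dir);
        tw.addCategory(new CategoryPath("hi", "there"));
        tw.commit();
        // The open writer holds the directory's write lock, so a second
        // writer on the same directory is refused.
        try {
          new DirectoryTaxonomyWriter(dir);
        } catch (LockObtainFailedException expected) {
          // Forcibly clearing the lock (e.g. after a crash) lets a new writer in.
          DirectoryTaxonomyWriter.unlock(dir);
        }
        TaxonomyWriter tw2 = new DirectoryTaxonomyWriter(dir);
        tw2.addCategory(new CategoryPath("hey"));
        tw2.close();
      }
    }
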
Modified: lucene/dev/branches/lucene2621/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/CHANGES.txt?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/CHANGES.txt (original)
+++ lucene/dev/branches/lucene2621/solr/CHANGES.txt Thu Nov  3 17:44:17 2011
@@ -444,6 +444,8 @@ Bug Fixes
 
 * SOLR-2849: Fix dependencies in Maven POMs. (David Smiley via Steve Rowe)
 
+* SOLR-2591: Remove commitLockTimeout option from solrconfig.xml (Luca Cavanna via Martijn van Groningen)
+
 ==================  3.4.0  ==================
 
 Upgrading from Solr 3.3

Modified: lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/solr/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -31,7 +31,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
   </indexDefaults>
 
   <mainIndex>

Modified: lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/test/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/test/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/test/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/client/ruby/solr-ruby/test/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -33,7 +33,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
   </indexDefaults>
 
   <mainIndex>

Modified: lucene/dev/branches/lucene2621/solr/contrib/clustering/src/test-files/clustering/solr/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/clustering/src/test-files/clustering/solr/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/clustering/src/test-files/clustering/solr/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/clustering/src/test-files/clustering/solr/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -45,7 +45,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler-extras/src/test-files/dihextras/solr/conf/dataimport-solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler-extras/src/test-files/dihextras/solr/conf/dataimport-solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler-extras/src/test-files/dihextras/solr/conf/dataimport-solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler-extras/src/test-files/dihextras/solr/conf/dataimport-solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -45,7 +45,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert: Turn on Lucene's auto commit capability.

Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/contentstream-solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/contentstream-solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/contentstream-solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/contentstream-solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -45,7 +45,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-nodatasource-solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-nodatasource-solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-nodatasource-solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-nodatasource-solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -45,7 +45,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -45,7 +45,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/contrib/extraction/src/test-files/extraction/solr/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/extraction/src/test-files/extraction/solr/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/extraction/src/test-files/extraction/solr/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/extraction/src/test-files/extraction/solr/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -49,8 +49,6 @@
     <ramBufferSizeMB>32</ramBufferSizeMB>
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:
@@ -72,7 +70,6 @@
     <mergeScheduler>org.apache.lucene.index.ConcurrentMergeScheduler</mergeScheduler>
     <!-- these are global... can't currently override per index -->
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/contrib/uima/src/test-files/uima/solr/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/uima/src/test-files/uima/solr/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/uima/src/test-files/uima/solr/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/uima/src/test-files/uima/solr/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -96,7 +96,6 @@
     <!-- <maxMergeDocs>2147483647</maxMergeDocs> -->
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
       Expert: Turn on Lucene's auto commit capability. This causes

Modified: lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/core/SolrCore.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/core/SolrCore.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/core/SolrCore.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/core/SolrCore.java Thu Nov  3 17:44:17 2011
@@ -51,6 +51,7 @@ import org.apache.solr.util.plugin.Named
 import org.apache.solr.util.plugin.SolrCoreAware;
 import org.apache.solr.util.plugin.PluginInfoInitialized;
 import org.apache.commons.io.IOUtils;
+import org.eclipse.jdt.core.dom.ThisExpression;
 import org.xml.sax.SAXException;
 
 import javax.xml.parsers.ParserConfigurationException;
@@ -815,6 +816,21 @@ public final class SolrCore implements S
      closeHooks.add( hook );
    }
 
+  /** @lucene.internal
+   *  Debugging aid only.  No non-test code should be released with uncommented verbose() calls.  */
+  public static boolean VERBOSE = Boolean.parseBoolean(System.getProperty("tests.verbose","false"));
+  public static void verbose(Object... args) {
+    if (!VERBOSE) return;
+    StringBuilder sb = new StringBuilder("VERBOSE:");
+    sb.append(Thread.currentThread().getName());
+    sb.append(':');
+    for (Object o : args) {
+      sb.append(' ');
+      sb.append(o==null ? "(null)" : o.toString());
+    }
+    System.out.println(sb.toString());
+  }
+
 
   ////////////////////////////////////////////////////////////////////////////////
   // Request Handler
@@ -1106,18 +1122,20 @@ public final class SolrCore implements S
       
       if (newestSearcher != null && solrConfig.reopenReaders
           && indexDirFile.equals(newIndexDirFile)) {
-        
+
         if (updateHandlerReopens) {
           
           tmp = getUpdateHandler().reopenSearcher(newestSearcher.get());
-          
         } else {
           
           IndexReader currentReader = newestSearcher.get().getIndexReader();
           IndexReader newReader;
           
+          // verbose("start reopen without writer, reader=", currentReader);
           newReader = IndexReader.openIfChanged(currentReader);
-          
+          // verbose("reopen result", newReader);
+
+
           if (newReader == null) {
             currentReader.incRef();
             newReader = currentReader;
@@ -1126,8 +1144,11 @@ public final class SolrCore implements S
           tmp = new SolrIndexSearcher(this, schema, "main", newReader, true, true, true, directoryFactory);
         }
 
+
       } else {
+        // verbose("non-reopen START:");
         tmp = new SolrIndexSearcher(this, newIndexDir, schema, getSolrConfig().mainIndexConfig, "main", true, true, directoryFactory);
+        // verbose("non-reopen DONE: searcher=",tmp);
       }
     } catch (Throwable th) {
       synchronized(searcherLock) {
@@ -1160,6 +1181,7 @@ public final class SolrCore implements S
       boolean alreadyRegistered = false;
       synchronized (searcherLock) {
         _searchers.add(newSearchHolder);
+        // verbose("added searcher ",newSearchHolder.get()," to _searchers");
 
         if (_searcher == null) {
           // if there isn't a current searcher then we may

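The verbose() helper added above is gated on the tests.verbose system property and is a no-op otherwise; a minimal usage sketch (the argument values are illustrative):

    // Run with -Dtests.verbose=true to enable output.
    import org.apache.solr.core.SolrCore;

    class VerboseSketch {
      void trace(Object searcher) {
        // When enabled, prints e.g. "VERBOSE:main: opened searcher <...>" to stdout;
        // when disabled, returns immediately after the VERBOSE flag check.
        SolrCore.verbose("opened searcher", searcher);
      }
    }
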
Modified: lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java Thu Nov  3 17:44:17 2011
@@ -27,6 +27,7 @@ import org.apache.solr.common.SolrExcept
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.response.transform.DocTransformer;
@@ -138,6 +139,8 @@ public class RealTimeGetComponent extend
          searcher = searcherHolder.get();
        }
 
+       // SolrCore.verbose("RealTimeGet using searcher ", searcher);
+
        int docid = searcher.getFirstMatch(new Term(idField.getName(), idBytes));
        if (docid < 0) continue;
        Document luceneDocument = searcher.doc(docid);

Modified: lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java Thu Nov  3 17:44:17 2011
@@ -184,7 +184,7 @@ public class SolrIndexSearcher extends I
 
   @Override
   public String toString() {
-    return name;
+    return name + "{" + reader + "}";
   }
 
   public SolrCore getCore() {

Modified: lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java Thu Nov  3 17:44:17 2011
@@ -29,6 +29,7 @@ import java.util.concurrent.atomic.Atomi
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;
@@ -157,7 +158,11 @@ public class DirectUpdateHandler2 extend
           updateTerm = cmd.updateTerm;
         }
 
-        writer.updateDocument(updateTerm, cmd.getLuceneDocument());
+        Document luceneDocument = cmd.getLuceneDocument();
+        // SolrCore.verbose("updateDocument",updateTerm,luceneDocument,writer);
+        writer.updateDocument(updateTerm, luceneDocument);
+        // SolrCore.verbose("updateDocument",updateTerm,"DONE");
+
         if(del) { // ensure id remains unique
           BooleanQuery bq = new BooleanQuery();
           bq.add(new BooleanClause(new TermQuery(updateTerm), Occur.MUST_NOT));
@@ -195,7 +200,12 @@ public class DirectUpdateHandler2 extend
     deleteByIdCommands.incrementAndGet();
     deleteByIdCommandsCumulative.incrementAndGet();
 
-    solrCoreState.getIndexWriter(core).deleteDocuments(new Term(idField.getName(), cmd.getIndexedId()));
+    IndexWriter writer = solrCoreState.getIndexWriter(core);
+    Term deleteTerm = new Term(idField.getName(), cmd.getIndexedId());
+
+    // SolrCore.verbose("deleteDocuments",deleteTerm,writer);
+    writer.deleteDocuments(deleteTerm);
+    // SolrCore.verbose("deleteDocuments",deleteTerm,"DONE");
 
     ulog.delete(cmd);
  
@@ -312,7 +322,9 @@ public class DirectUpdateHandler2 extend
           ulog.preCommit(cmd);
         }
 
+        // SolrCore.verbose("writer.commit() start writer=",writer);
         writer.commit();
+        // SolrCore.verbose("writer.commit() end");
         numDocsPending.set(0);
         callPostCommitCallbacks();
       } else {
@@ -385,8 +397,10 @@ public class DirectUpdateHandler2 extend
     IndexReader currentReader = previousSearcher.getIndexReader();
     IndexReader newReader;
 
-    newReader = IndexReader.openIfChanged(currentReader, solrCoreState.getIndexWriter(core), true);
-  
+    IndexWriter writer = solrCoreState.getIndexWriter(core);
+    // SolrCore.verbose("start reopen from",previousSearcher,"writer=",writer);
+    newReader = IndexReader.openIfChanged(currentReader, writer, true);
+    // SolrCore.verbose("reopen result", newReader);
     
     if (newReader == null) {
       currentReader.incRef();

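For context: IndexReader.openIfChanged(reader, writer, true), as used above, performs a near-real-time reopen against the writer's uncommitted state and returns null when nothing changed. A sketch of the reopen-or-incRef pattern the handler follows:

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.IndexWriter;

    class NrtReopenSketch {
      /** Returns a reader that the caller must eventually close. */
      IndexReader reopen(IndexReader currentReader, IndexWriter writer) throws IOException {
        // NRT reopen: sees uncommitted changes in the writer; applyAllDeletes=true.
        IndexReader newReader = IndexReader.openIfChanged(currentReader, writer, true);
        if (newReader == null) {
          // Nothing changed: share the old reader and bump its refcount so
          // that closing the returned reader is always safe.
          currentReader.incRef();
          newReader = currentReader;
        }
        return newReader;
      }
    }
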
Modified: lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/FSUpdateLog.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/FSUpdateLog.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/FSUpdateLog.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/FSUpdateLog.java Thu Nov  3 17:44:17 2011
@@ -164,7 +164,7 @@ public class FSUpdateLog extends UpdateL
       long pos = tlog.write(cmd);
       LogPtr ptr = new LogPtr(pos);
       map.put(cmd.getIndexedId(), ptr);
-      // System.out.println("TLOG: added id " + cmd.getPrintableId() + " to " + tlog + " " + ptr + " map=" + System.identityHashCode(map));
+      // SolrCore.verbose("TLOG: added id " + cmd.getPrintableId() + " to " + tlog + " " + ptr + " map=" + System.identityHashCode(map));
     }
   }
 
@@ -177,7 +177,7 @@ public class FSUpdateLog extends UpdateL
       long pos = tlog.writeDelete(cmd);
       LogPtr ptr = new LogPtr(pos);
       map.put(br, ptr);
-      // System.out.println("TLOG: added delete for id " + cmd.id + " to " + tlog + " " + ptr + " map=" + System.identityHashCode(map));
+      // SolrCore.verbose("TLOG: added delete for id " + cmd.id + " to " + tlog + " " + ptr + " map=" + System.identityHashCode(map));
     }
   }
 
@@ -191,7 +191,7 @@ public class FSUpdateLog extends UpdateL
       // optimistic concurrency? Maybe we shouldn't support deleteByQuery w/ optimistic concurrency
       long pos = tlog.writeDeleteByQuery(cmd);
       LogPtr ptr = new LogPtr(pos);
-      // System.out.println("TLOG: added deleteByQuery " + cmd.query + " to " + tlog + " " + ptr + " map=" + System.identityHashCode(map));
+      // SolrCore.verbose("TLOG: added deleteByQuery " + cmd.query + " to " + tlog + " " + ptr + " map=" + System.identityHashCode(map));
     }
   }
 
@@ -251,7 +251,7 @@ public class FSUpdateLog extends UpdateL
       // But we do know that any updates already added will definitely
       // show up in the latest reader after the commit succeeds.
       map = new HashMap<BytesRef, LogPtr>();
-      // System.out.println("TLOG: preSoftCommit: prevMap="+ System.identityHashCode(prevMap) + " new map=" + System.identityHashCode(map));
+      // SolrCore.verbose("TLOG: preSoftCommit: prevMap="+ System.identityHashCode(prevMap) + " new map=" + System.identityHashCode(map));
     }
   }
 
@@ -264,7 +264,7 @@ public class FSUpdateLog extends UpdateL
       // If this DUH2 synchronization were to be removed, preSoftCommit should
       // record what old maps were created and only remove those.
       clearOldMaps();
-      // System.out.println("TLOG: postSoftCommit: disposing of prevMap="+ System.identityHashCode(prevMap));
+      // SolrCore.verbose("TLOG: postSoftCommit: disposing of prevMap="+ System.identityHashCode(prevMap));
     }
   }
 
@@ -276,18 +276,18 @@ public class FSUpdateLog extends UpdateL
     synchronized (this) {
       entry = map.get(indexedId);
       lookupLog = tlog;  // something found in "map" will always be in "tlog"
-      // System.out.println("TLOG: lookup: for id " + indexedId.utf8ToString() + " in map " +  System.identityHashCode(map) + " got " + entry + " lookupLog=" + lookupLog);
+      // SolrCore.verbose("TLOG: lookup: for id " + indexedId.utf8ToString() + " in map " +  System.identityHashCode(map) + " got " + entry + " lookupLog=" + lookupLog);
       if (entry == null && prevMap != null) {
         entry = prevMap.get(indexedId);
        // something found in prevMap will always be found in prevMapLog (which could be tlog or prevTlog)
         lookupLog = prevMapLog;
-        // System.out.println("TLOG: lookup: for id " + indexedId.utf8ToString() + " in prevMap " +  System.identityHashCode(prevMap) + " got " + entry + " lookupLog="+lookupLog);
+        // SolrCore.verbose("TLOG: lookup: for id " + indexedId.utf8ToString() + " in prevMap " +  System.identityHashCode(prevMap) + " got " + entry + " lookupLog="+lookupLog);
       }
       if (entry == null && prevMap2 != null) {
         entry = prevMap2.get(indexedId);
        // something found in prevMap2 will always be found in prevMapLog2 (which could be tlog or prevTlog)
         lookupLog = prevMapLog2;
-        // System.out.println("TLOG: lookup: for id " + indexedId.utf8ToString() + " in prevMap2 " +  System.identityHashCode(prevMap) + " got " + entry + " lookupLog="+lookupLog);
+        // SolrCore.verbose("TLOG: lookup: for id " + indexedId.utf8ToString() + " in prevMap2 " +  System.identityHashCode(prevMap) + " got " + entry + " lookupLog="+lookupLog);
       }
 
       if (entry == null) {
@@ -431,6 +431,7 @@ class TransactionLog {
       this.tlogFile = tlogFile;
       raf = new RandomAccessFile(this.tlogFile, "rw");
       start = raf.length();
+      // System.out.println("###start= "+start);
       channel = raf.getChannel();
       os = Channels.newOutputStream(channel);
       fos = FastOutputStream.wrap(os);
@@ -481,12 +482,22 @@ class TransactionLog {
           pos = start + fos.size();
         }
 
+        /***
+        System.out.println("###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length());
+         if (pos != fos.size()) {
+          throw new RuntimeException("ERROR" + "###writing at " + pos + " fos.size()=" + fos.size() + " raf.length()=" + raf.length());
+        }
+         ***/
+
         codec.init(fos);
         codec.writeTag(JavaBinCodec.ARR, 3);
         codec.writeInt(UpdateLog.ADD);  // should just take one byte
         codec.writeLong(0);  // the version... should also just be one byte if 0
         codec.writeSolrInputDocument(cmd.getSolrInputDocument());
         // fos.flushBuffer();  // flush later
+
+
+
         return pos;
       } catch (IOException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
@@ -546,6 +557,12 @@ class TransactionLog {
       synchronized (fos) {
         // TODO: optimize this by keeping track of what we have flushed up to
         fos.flushBuffer();
+        /***
+         System.out.println("###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos);
+        if (fos.size() != raf.length() || pos >= fos.size() ) {
+          throw new RuntimeException("ERROR" + "###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos);
+        }
+        ***/
       }
 
       ChannelFastInputStream fis = new ChannelFastInputStream(channel, pos);

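The tracing calls above annotate the update log's three-level pointer lookup: the live map is consulted first, then the two maps retired by recent soft commits, each paired with the transaction log that holds its entries. A simplified sketch of that cascade (types reduced for illustration; the real code stores LogPtr entries):

    import java.util.Map;

    class TlogLookupSketch {
      Map<Object, Long> map, prevMap, prevMap2;   // prevMap/prevMap2 may be null
      Object tlog, prevMapLog, prevMapLog2;       // the log each map points into

      /** Returns the log holding the id's latest entry, or null if unknown. */
      synchronized Object lookup(Object indexedId) {
        Long entry = map.get(indexedId);
        Object lookupLog = tlog;                  // anything in "map" lives in "tlog"
        if (entry == null && prevMap != null) {
          entry = prevMap.get(indexedId);
          lookupLog = prevMapLog;                 // could be tlog or the previous tlog
        }
        if (entry == null && prevMap2 != null) {
          entry = prevMap2.get(indexedId);
          lookupLog = prevMapLog2;
        }
        return entry == null ? null : lookupLog;
      }
    }
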
Modified: lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java Thu Nov  3 17:44:17 2011
@@ -54,7 +54,6 @@ public class SolrIndexConfig {
     mergeFactor = -1;
     ramBufferSizeMB = 16;
     writeLockTimeout = -1;
-    commitLockTimeout = -1;
     lockType = null;
     termIndexInterval = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
     mergePolicyInfo = null;
@@ -72,7 +71,6 @@ public class SolrIndexConfig {
   public final double ramBufferSizeMB;
 
   public final int writeLockTimeout;
-  public final int commitLockTimeout;
   public final String lockType;
   public final PluginInfo mergePolicyInfo;
   public final PluginInfo mergeSchedulerInfo;
@@ -96,7 +94,6 @@ public class SolrIndexConfig {
     ramBufferSizeMB = solrConfig.getDouble(prefix+"/ramBufferSizeMB", def.ramBufferSizeMB);
 
     writeLockTimeout=solrConfig.getInt(prefix+"/writeLockTimeout", def.writeLockTimeout);
-    commitLockTimeout=solrConfig.getInt(prefix+"/commitLockTimeout", def.commitLockTimeout);
     lockType=solrConfig.get(prefix+"/lockType", def.lockType);
 
     String str =  solrConfig.get(prefix+"/mergeScheduler/text()",null);

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-elevate.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-elevate.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-elevate.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-elevate.xml Thu Nov  3 17:44:17 2011
@@ -56,7 +56,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:
@@ -78,7 +77,6 @@
     <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
     <!-- these are global... can't currently override per index -->
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-enableplugin.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-enableplugin.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-enableplugin.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-enableplugin.xml Thu Nov  3 17:44:17 2011
@@ -39,10 +39,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master.xml Thu Nov  3 17:44:17 2011
@@ -34,11 +34,8 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master1.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master1.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master1.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master1.xml Thu Nov  3 17:44:17 2011
@@ -32,12 +32,8 @@
     <ramBufferSizeMB>32</ramBufferSizeMB>
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
     <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master2.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master2.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master2.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master2.xml Thu Nov  3 17:44:17 2011
@@ -32,13 +32,8 @@
     <ramBufferSizeMB>32</ramBufferSizeMB>
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
     <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
     <lockType>single</lockType>
   </indexDefaults>
 

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master3.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master3.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master3.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-master3.xml Thu Nov  3 17:44:17 2011
@@ -32,12 +32,9 @@
     <ramBufferSizeMB>32</ramBufferSizeMB>
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-repeater.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-repeater.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-repeater.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-repeater.xml Thu Nov  3 17:44:17 2011
@@ -34,10 +34,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave.xml Thu Nov  3 17:44:17 2011
@@ -34,10 +34,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave1.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave1.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave1.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-slave1.xml Thu Nov  3 17:44:17 2011
@@ -33,10 +33,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-solcoreproperties.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-solcoreproperties.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-solcoreproperties.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig-solcoreproperties.xml Thu Nov  3 17:44:17 2011
@@ -39,10 +39,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -71,7 +71,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:
@@ -93,7 +92,6 @@
     <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
     <!-- these are global... can't currently override per index -->
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <lockType>single</lockType>
   </indexDefaults>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/crazy-path-to-config.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/crazy-path-to-config.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/crazy-path-to-config.xml (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/crazy-path-to-config.xml Thu Nov  3 17:44:17 2011
@@ -32,7 +32,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
   </indexDefaults>
 
   <mainIndex>

Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java Thu Nov  3 17:44:17 2011
@@ -17,6 +17,14 @@
 package org.apache.solr.search;
 
 
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.noggit.ObjectBuilder;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
@@ -25,6 +33,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.Ignore;
 
+import java.io.IOException;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -32,6 +41,8 @@ import java.util.concurrent.atomic.Atomi
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
+import static org.apache.solr.core.SolrCore.verbose;
+
 public class TestRealTimeGet extends SolrTestCaseJ4 {
 
   @BeforeClass
@@ -124,19 +135,6 @@ public class TestRealTimeGet extends Sol
   ***/
 
 
-  public static void verbose(Object... args) {
-    if (!VERBOSE) return;
-    StringBuilder sb = new StringBuilder("TEST:");
-    sb.append(Thread.currentThread().getName());
-    sb.append(':');
-    for (Object o : args) {
-      sb.append(' ');
-      sb.append(o.toString());
-    }
-    System.out.println(sb.toString());
-  }
-
-
   final ConcurrentHashMap<Integer,Long> model = new ConcurrentHashMap<Integer,Long>();
   Map<Integer,Long> committedModel = new HashMap<Integer,Long>();
   long snapshotCount;
@@ -159,14 +157,15 @@ public class TestRealTimeGet extends Sol
     committedModel.putAll(model);
   }
 
-
   @Test
   public void testStressGetRealtime() throws Exception {
     clearIndex();
     assertU(commit());
 
+    // req().getCore().getUpdateHandler().getIndexWriterProvider().getIndexWriter(req().getCore()).setInfoStream(System.out);
+
     final int commitPercent = 5 + random.nextInt(20);
-    final int softCommitPercent = 30+random.nextInt(60); // what percent of the commits are soft
+    final int softCommitPercent = 30+random.nextInt(75); // what percent of the commits are soft
     final int deletePercent = 4+random.nextInt(25);
     final int deleteByQueryPercent = 0;  // real-time get isn't currently supported with delete-by-query
     final int ndocs = 5 + (random.nextBoolean() ? random.nextInt(25) : random.nextInt(200));
@@ -176,10 +175,22 @@ public class TestRealTimeGet extends Sol
 
         // query variables
     final int percentRealtimeQuery = 60;
     final AtomicLong operations = new AtomicLong(50000);  // number of query operations to perform in total
     int nReadThreads = 5 + random.nextInt(25);
 
 
+    verbose("commitPercent=", commitPercent);
+    verbose("softCommitPercent=",softCommitPercent);
+    verbose("deletePercent=",deletePercent);
+    verbose("deleteByQueryPercent=", deleteByQueryPercent);
+    verbose("ndocs=", ndocs);
+    verbose("nWriteThreads=", nWriteThreads);
+    verbose("nReadThreads=", nReadThreads);
+    verbose("percentRealtimeQuery=", percentRealtimeQuery);
+    verbose("maxConcurrentCommits=", maxConcurrentCommits);
+    verbose("operations=", operations);
+
 
     initModel(ndocs);
 
@@ -205,6 +216,7 @@ public class TestRealTimeGet extends Sol
                 synchronized(TestRealTimeGet.this) {
                   newCommittedModel = new HashMap<Integer,Long>(model);  // take a snapshot
                   version = snapshotCount++;
+                  verbose("took snapshot version=",version);
                 }
 
                 if (rand.nextInt(100) < softCommitPercent) {
@@ -212,9 +224,9 @@ public class TestRealTimeGet extends Sol
                   assertU(h.commit("softCommit","true"));
                   verbose("softCommit end");
                 } else {
-                  verbose("commit start");
+                  verbose("hardCommit start");
                   assertU(commit());
-                  verbose("commit end");
+                  verbose("hardCommit end");
                 }
 
                 synchronized(TestRealTimeGet.this) {
@@ -344,7 +356,7 @@ public class TestRealTimeGet extends Sol
                 assertEquals(1, doclist.size());
                 long foundVal = (Long)(((Map)doclist.get(0)).get(field));
                 if (foundVal < Math.abs(val)) {
-                  verbose("ERROR, id=", id, "foundVal=",foundVal,"model val=",val);
+                  verbose("ERROR, id", id, "foundVal=",foundVal,"model val=",val,"realTime=",realTime);
                   assertTrue(foundVal >= Math.abs(val));
                 }
               }
@@ -372,4 +384,356 @@ public class TestRealTimeGet extends Sol
 
   }
 
+
+
+
+  // The purpose of this test is to roughly model how solr uses lucene
+  IndexReader reader;
+  @Test
+  public void testStressLuceneNRT() throws Exception {
+    final int commitPercent = 5 + random.nextInt(20);
+    final int softCommitPercent = 30+random.nextInt(75); // what percent of the commits are soft
+    final int deletePercent = 4+random.nextInt(25);
+    final int deleteByQueryPercent = 0;  // real-time get isn't currently supported with delete-by-query
+    final int ndocs = 5 + (random.nextBoolean() ? random.nextInt(25) : random.nextInt(200));
+    int nWriteThreads = 5 + random.nextInt(25);
+
+    final int maxConcurrentCommits = nWriteThreads;   // number of committers at a time... it should be <= maxWarmingSearchers
+
+    final AtomicLong operations = new AtomicLong(10000);  // number of query operations to perform in total - crank up for a longer stress run
+    int nReadThreads = 5 + random.nextInt(25);
+    final boolean tombstones = random.nextBoolean();
+    final boolean syncCommits = random.nextBoolean();
+
+    verbose("commitPercent=", commitPercent);
+    verbose("softCommitPercent=",softCommitPercent);
+    verbose("deletePercent=",deletePercent);
+    verbose("deleteByQueryPercent=", deleteByQueryPercent);
+    verbose("ndocs=", ndocs);
+    verbose("nWriteThreads=", nWriteThreads);
+    verbose("nReadThreads=", nReadThreads);
+    verbose("maxConcurrentCommits=", maxConcurrentCommits);
+    verbose("operations=", operations);
+    verbose("tombstones=", tombstones);
+    verbose("syncCommits=", syncCommits);
+
+    initModel(ndocs);
+
+    final AtomicInteger numCommitting = new AtomicInteger();
+
+    List<Thread> threads = new ArrayList<Thread>();
+
+
+    final FieldType idFt = new FieldType();
+    idFt.setIndexed(true);
+    idFt.setStored(true);
+    idFt.setOmitNorms(true);
+    idFt.setTokenized(false);
+    idFt.setIndexOptions(FieldInfo.IndexOptions.DOCS_ONLY);
+
+    final FieldType ft2 = new FieldType();
+    ft2.setIndexed(false);
+    ft2.setStored(true);
+
+
+    // model how solr does locking - only allow one thread to do a hard commit at once, and only one thread to do a soft commit, but
+    // a hard commit in progress does not stop a soft commit.
+    final Lock hardCommitLock = syncCommits ? new ReentrantLock() : null;
+    final Lock reopenLock = syncCommits ? new ReentrantLock() : null;
+
+
+    // RAMDirectory dir = new RAMDirectory();
+    // final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_40, new WhitespaceAnalyzer(Version.LUCENE_40)));
+
+    Directory dir = newDirectory();
+
+    final RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+    writer.setDoRandomOptimizeAssert(false);
+    writer.w.setInfoStream(VERBOSE ? System.out : null);
+
+    // writer.commit();
+    // reader = IndexReader.open(dir);
+    // make this reader an NRT reader from the start, to avoid a first non-writer
+    // openIfChanged that could only open at the last commit point.
+    reader = IndexReader.open(writer.w, true);
+
+    for (int i=0; i<nWriteThreads; i++) {
+      Thread thread = new Thread("WRITER"+i) {
+        Random rand = new Random(random.nextInt());
+
+        @Override
+        public void run() {
+          try {
+            while (operations.get() > 0) {
+              int oper = rand.nextInt(100);
+
+              if (oper < commitPercent) {
+                if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
+                  Map<Integer,Long> newCommittedModel;
+                  long version;
+                  IndexReader oldReader;
+
+                  boolean softCommit = rand.nextInt(100) < softCommitPercent;
+
+                  if (!softCommit) {
+                    // only allow one hard commit to proceed at once
+                    if (hardCommitLock != null) hardCommitLock.lock();
+                    verbose("hardCommit start");
+
+                    writer.commit();
+                  }
+
+                  if (reopenLock != null) reopenLock.lock();
+
+                  synchronized(TestRealTimeGet.this) {
+                    newCommittedModel = new HashMap<Integer,Long>(model);  // take a snapshot
+                    version = snapshotCount++;
+                    oldReader = reader;
+                    oldReader.incRef();  // increment the reference since we will use this for reopening
+                  }
+
+                  if (!softCommit) {
+                    // must commit after taking a snapshot of the model
+                    // writer.commit();
+                  }
+
+                  verbose("reopen start using", oldReader);
+
+                  IndexReader newReader;
+                  if (softCommit) {
+                    newReader = IndexReader.openIfChanged(oldReader, writer.w, true);
+                  } else {
+                    // will only open to last commit
+                    newReader = IndexReader.openIfChanged(oldReader);
+                  }
+
+
+                  if (newReader == null) {
+                    oldReader.incRef();
+                    newReader = oldReader;
+                  }
+                  oldReader.decRef();
+
+                  verbose("reopen result", newReader);
+
+                  synchronized(TestRealTimeGet.this) {
+                    assert newReader.getRefCount() > 0;
+                    assert reader.getRefCount() > 0;
+
+                    // install the new reader if it's newest (and check the current version since another reader may have already been installed)
+                    if (newReader.getVersion() > reader.getVersion()) {
+                      reader.decRef();
+                      reader = newReader;
+
+                      // install this snapshot only if it's newer than the current one
+                      if (version >= committedModelClock) {
+                        committedModel = newCommittedModel;
+                        committedModelClock = version;
+                      }
+
+                    } else {
+                      // close if unused
+                      newReader.decRef();
+                    }
+
+                  }
+
+                  if (reopenLock != null) reopenLock.unlock();
+
+                  if (!softCommit) {
+                    if (hardCommitLock != null) hardCommitLock.unlock();
+                  }
+
+                }
+                numCommitting.decrementAndGet();
+                continue;
+              }
+
+
+              int id = rand.nextInt(ndocs);
+              Object sync = syncArr[id];
+
+              // sometimes set lastId before we actually change the doc, to try to
+              // uncover more race conditions between writing and reading
+              boolean before = rand.nextBoolean();
+              if (before) {
+                lastId = id;
+              }
+
+              // We can't concurrently update the same document and retain our invariants of increasing values
+              // since we can't guarantee the order in which the updates will execute.
+              synchronized (sync) {
+                Long val = model.get(id);
+                long nextVal = Math.abs(val)+1;
+
+                if (oper < commitPercent + deletePercent) {
+                  // add tombstone first
+                  if (tombstones) {
+                    Document d = new Document();
+                    d.add(new Field("id","-"+Integer.toString(id), idFt));
+                    d.add(new Field(field, Long.toString(nextVal), ft2));
+                    verbose("adding tombstone for id",id,"val=",nextVal);
+                    writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d);
+                  }
+
+                  verbose("deleting id",id,"val=",nextVal);
+                  writer.deleteDocuments(new Term("id",Integer.toString(id)));
+                  model.put(id, -nextVal);
+                  verbose("deleting id",id,"val=",nextVal,"DONE");
+
+                } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
+                  //assertU("<delete><query>id:" + id + "</query></delete>");
+
+                  // add tombstone first
+                  if (tombstones) {
+                    Document d = new Document();
+                    d.add(new Field("id","-"+Integer.toString(id), idFt));
+                    d.add(new Field(field, Long.toString(nextVal), ft2));
+                    verbose("adding tombstone for id",id,"val=",nextVal);
+                    writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d);
+                  }
+
+                  verbose("deleteByQuery",id,"val=",nextVal);
+                  writer.deleteDocuments(new TermQuery(new Term("id", Integer.toString(id))));
+                  model.put(id, -nextVal);
+                  verbose("deleteByQuery",id,"val=",nextVal,"DONE");
+                } else {
+                  // model.put(id, nextVal);   // uncomment this and this test should fail.
+
+                  // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal)));
+                  Document d = new Document();
+                  d.add(new Field("id",Integer.toString(id), idFt));
+                  d.add(new Field(field, Long.toString(nextVal), ft2));
+                  verbose("adding id",id,"val=",nextVal);
+                  writer.updateDocument(new Term("id", Integer.toString(id)), d);
+                  if (tombstones) {
+                    // remove tombstone after new addition (this should be optional?)
+                    verbose("deleting tombstone for id",id);
+                    writer.deleteDocuments(new Term("id","-"+Integer.toString(id)));
+                    verbose("deleting tombstone for id",id,"DONE");
+                  }
+
+                  model.put(id, nextVal);
+                  verbose("adding id",id,"val=",nextVal,"DONE");
+                }
+              }
+
+              if (!before) {
+                lastId = id;
+              }
+            }
+          } catch (Exception ex) {
+            throw new RuntimeException(ex);
+          }
+        }
+      };
+
+      threads.add(thread);
+    }
+
+
+    for (int i=0; i<nReadThreads; i++) {
+      Thread thread = new Thread("READER"+i) {
+        Random rand = new Random(random.nextInt());
+
+        @Override
+        public void run() {
+          try {
+            while (operations.decrementAndGet() >= 0) {
+              // bias toward a recently changed doc
+              int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);
+
+              // when indexing, we update the index, then the model
+              // so when querying, we should first check the model, and then the index
+
+              long val;
+
+              synchronized(TestRealTimeGet.this) {
+                val = committedModel.get(id);
+              }
+
+
+              IndexReader r;
+              synchronized(TestRealTimeGet.this) {
+                r = reader;
+                r.incRef();
+              }
+
+              int docid = getFirstMatch(r, new Term("id",Integer.toString(id)));
+
+              if (docid < 0 && tombstones) {
+                // if we couldn't find the doc, look for its tombstone
+                docid = getFirstMatch(r, new Term("id","-"+Integer.toString(id)));
+                if (docid < 0) {
+                  if (val == -1L) {
+                    // expected... no doc was added yet
+                    r.decRef();
+                    continue;
+                  }
+                  verbose("ERROR: Couldn't find a doc or tombstone for id", id, "using reader",r,"expected value",val);
+                  fail("No documents or tombstones found for id " + id + ", expected at least " + val);
+                }
+              }
+
+              if (docid < 0 && !tombstones) {
+                // nothing to do - we can't tell anything from a deleted doc without tombstones
+              } else {
+                if (docid < 0) {
+                  verbose("ERROR: Couldn't find a doc for id", id, "using reader",r);
+                }
+                assertTrue(docid >= 0);   // we should have found the document, or its tombstone
+                Document doc = r.document(docid);
+                long foundVal = Long.parseLong(doc.get(field));
+                if (foundVal < Math.abs(val)) {
+                  verbose("ERROR: id",id,"model_val=",val,"foundVal=",foundVal,"reader=",reader);
+                }
+                assertTrue(foundVal >= Math.abs(val));
+              }
+
+              r.decRef();
+            }
+          }
+          catch (Throwable e) {
+            operations.set(-1L);
+            SolrException.log(log,e);
+            fail(e.toString());
+          }
+        }
+      };
+
+      threads.add(thread);
+    }
+
+
+    for (Thread thread : threads) {
+      thread.start();
+    }
+
+    for (Thread thread : threads) {
+      thread.join();
+    }
+
+    writer.close();
+    reader.close();
+    dir.close();
+  }
+
+
+  public int getFirstMatch(IndexReader r, Term t) throws IOException {
+    Fields fields = MultiFields.getFields(r);
+    if (fields == null) return -1;
+    Terms terms = fields.terms(t.field());
+    if (terms == null) return -1;
+    BytesRef termBytes = t.bytes();
+    DocsEnum docs = terms.docs(MultiFields.getLiveDocs(r), termBytes, null);
+    if (docs == null) return -1;
+    int id = docs.nextDoc();
+    if (id != DocIdSetIterator.NO_MORE_DOCS) {
+      int next = docs.nextDoc();
+      assertEquals(DocIdSetIterator.NO_MORE_DOCS, next);
+    }
+    return id == DocIdSetIterator.NO_MORE_DOCS ? -1 : id;
+  }
+
 }

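The heart of the new testStressLuceneNRT above is the Solr-style reader
lifecycle: the first reader is opened as an NRT reader directly from the
writer, and each commit then reopens it via openIfChanged, with incRef/decRef
managing the handoff between threads. A condensed sketch of that reopen step,
using the same trunk-era API the test calls (the class and method names here
are made up for illustration):

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.IndexWriter;

    class NrtReopenSketch {
      // Return the freshest reader view, releasing the old one if replaced.
      // The caller is assumed to own one reference to 'current'.
      static IndexReader reopen(IndexReader current, IndexWriter writer)
          throws IOException {
        // applyAllDeletes=true: the same flag the test passes for soft commits
        IndexReader newer = IndexReader.openIfChanged(current, writer, true);
        if (newer == null) {
          return current;   // nothing changed; keep using the old reader
        }
        current.decRef();   // drop our reference to the stale view
        return newer;       // openIfChanged returns an already-referenced reader
      }
    }

The test's extra incRef/getVersion bookkeeping on top of this exists because
multiple committer threads race to install a new reader, and only the newest
view may win.
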
Modified: lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java (original)
+++ lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java Thu Nov  3 17:44:17 2011
@@ -104,9 +104,8 @@ public class SoftAutoCommitTest extends 
     // toss in another doc, shouldn't affect first hard commit time we poll
     assertU(adoc("id", "530", "subject", "just for noise/activity"));
 
-    // wait for the hard commit, shouldn't need any fudge given 
-    // other actions already taken
-    Long hard529 = monitor.hard.poll(hardCommitWaitMillis * 2, MILLISECONDS);
+    // wait for the hard commit
+    Long hard529 = monitor.hard.poll(hardCommitWaitMillis * 5, MILLISECONDS);
     assertNotNull("hard529 wasn't fast enough", hard529);
     monitor.assertSaneOffers();
     
@@ -226,9 +225,8 @@ public class SoftAutoCommitTest extends 
     // toss in another doc, shouldn't affect first hard commit time we poll
     assertU(adoc("id", "550", "subject", "just for noise/activity"));
 
-    // wait for the hard commit, shouldn't need any fudge given 
-    // other actions already taken
-    hard529 = monitor.hard.poll(hardCommitWaitMillis * 2, MILLISECONDS);
+    // wait for the hard commit
+    hard529 = monitor.hard.poll(hardCommitWaitMillis * 3, MILLISECONDS);
     assertNotNull("hard529 wasn't fast enough", hard529);
     monitor.assertSaneOffers();
     

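The SoftAutoCommitTest change above is purely a flakiness fix: a 2x multiplier
on hardCommitWaitMillis left too little headroom on slow or heavily loaded
machines, so the poll windows widen to 5x and 3x. The shape of the assertion,
sketched with assumed types (monitor.hard is treated here as a BlockingQueue
of commit timestamps, which matches how the test uses it):

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;
    import static java.util.concurrent.TimeUnit.MILLISECONDS;

    public class PollHeadroomSketch {
      public static void main(String[] args) throws InterruptedException {
        long hardCommitWaitMillis = 1000;          // illustrative value
        BlockingQueue<Long> hard = new LinkedBlockingQueue<Long>();
        hard.offer(System.currentTimeMillis());    // pretend a hard commit fired

        // Poll well past the configured interval; a tight bound turns GC and
        // I/O hiccups into spurious "wasn't fast enough" failures.
        Long hard529 = hard.poll(hardCommitWaitMillis * 5, MILLISECONDS);
        if (hard529 == null) throw new AssertionError("hard529 wasn't fast enough");
      }
    }
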
Modified: lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/db/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/db/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/db/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/db/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -50,7 +50,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/mail/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/mail/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/mail/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/mail/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -49,7 +49,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/rss/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/rss/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/rss/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/rss/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -50,7 +50,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/tika/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/tika/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/tika/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/example/example-DIH/solr/tika/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -49,7 +49,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!--
      Expert:

Modified: lucene/dev/branches/lucene2621/solr/example/solr/conf/solrconfig.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/example/solr/conf/solrconfig.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/example/solr/conf/solrconfig.xml (original)
+++ lucene/dev/branches/lucene2621/solr/example/solr/conf/solrconfig.xml Thu Nov  3 17:44:17 2011
@@ -130,7 +130,6 @@
 
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
 
     <!-- Expert: Merge Policy 
 

Modified: lucene/dev/branches/lucene2621/solr/solrj/src/test-files/solrj/solr/conf/solrconfig-slave1.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/solrj/src/test-files/solrj/solr/conf/solrconfig-slave1.xml?rev=1197240&r1=1197239&r2=1197240&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/solrj/src/test-files/solrj/solr/conf/solrconfig-slave1.xml (original)
+++ lucene/dev/branches/lucene2621/solr/solrj/src/test-files/solrj/solr/conf/solrconfig-slave1.xml Thu Nov  3 17:44:17 2011
@@ -33,11 +33,6 @@
     <maxMergeDocs>2147483647</maxMergeDocs>
     <maxFieldLength>10000</maxFieldLength>
     <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
-    <writeLockTimeout>1000</writeLockTimeout>
-    <commitLockTimeout>10000</commitLockTimeout>
-
     <lockType>single</lockType>
   </indexDefaults>
 


