lucenenet-commits mailing list archives

From: ccurr...@apache.org
Subject: [Lucene.Net] svn commit: r1202091 [3/6] - in /incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk: src/core/ src/core/Index/ src/core/Search/ src/core/Store/ src/demo/Demo.Common/ test/core/ test/core/Analysis/ test/core/Index/ test/core/QueryParser/ test/core/Sea...
Date: Tue, 15 Nov 2011 08:41:46 GMT
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriter.cs?rev=1202091&r1=1202090&r2=1202091&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Index/TestIndexWriter.cs Tue Nov 15 08:41:44 2011
@@ -16,6 +16,7 @@
  */
 
 using System;
+using System.Threading;
 using Lucene.Net.Support;
 using NUnit.Framework;
 
@@ -53,22 +54,16 @@ using ScoreDoc = Lucene.Net.Search.Score
 using TermQuery = Lucene.Net.Search.TermQuery;
 using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
 using _TestUtil = Lucene.Net.Util._TestUtil;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary> </summary>
     [TestFixture]
     public class TestIndexWriter : BaseTokenStreamTestCase
     {
-        internal static System.Collections.Hashtable testWithNewApiData = new System.Collections.Hashtable();
-        static TestIndexWriter()
+        public TestIndexWriter(System.String name)
+            : base(name)
         {
-            System.String[] data = new System.String[] { "TestExceptionFromTokenStream", "TestDocumentsWriterExceptions", "TestNegativePositions", "TestEndOffsetPositionWithCachingTokenFilter", "TestEndOffsetPositionWithTeeSinkTokenFilter", "TestEndOffsetPositionStandard", "TestEndOffsetPositionStandardEmptyField", "TestEndOffsetPositionStandardEmptyField2" };
-            for (int i = 0; i < data.Length; i++)
-            {
-                testWithNewApiData.Add(data[i], data[i]);
-            }
         }
 
         public class MyRAMDirectory : RAMDirectory
@@ -151,7 +146,7 @@ namespace Lucene.Net.Index
 
             public override /*virtual*/ TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
             {
-                return new AnonymousClassTokenFilter(this, new StandardTokenizer(reader));
+                return new AnonymousClassTokenFilter(this, new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader));
             }
         }
         private class AnonymousClassAnalyzer1 : Analyzer
@@ -359,8 +354,8 @@ namespace Lucene.Net.Index
             private void InitBlock(TestIndexWriter enclosingInstance)
             {
                 this.enclosingInstance = enclosingInstance;
-                this.termAtt = (TermAttribute)this.AddAttribute(typeof(TermAttribute));
-                this.posIncrAtt = (PositionIncrementAttribute)this.AddAttribute(typeof(PositionIncrementAttribute));
+                this.termAtt = this.AddAttribute<TermAttribute>();
+                this.posIncrAtt = this.AddAttribute<PositionIncrementAttribute>();
             }
             private TestIndexWriter enclosingInstance;
             public TestIndexWriter Enclosing_Instance
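
The hunk above moves from the reflection-based AddAttribute(typeof(...)) calls to the
generic AddAttribute<T>() form of the token attribute API. A minimal consumer-side
sketch (an Analyzer instance named analyzer is assumed; member names follow the
2.9-era API this file is being ported from, so some may surface as properties in the
final 3.0.3 port):

    TokenStream ts = analyzer.TokenStream("content", new System.IO.StringReader("aaa bbb"));
    // acquire strongly typed attribute instances once, before iterating
    TermAttribute termAtt = ts.AddAttribute<TermAttribute>();
    PositionIncrementAttribute posIncrAtt = ts.AddAttribute<PositionIncrementAttribute>();
    while (ts.IncrementToken())
    {
        // termAtt and posIncrAtt now describe the current token's text and position gap
    }
    ts.Close();
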
@@ -391,7 +386,7 @@ namespace Lucene.Net.Index
         private class AnonymousClassIndexWriter : IndexWriter
         {
             public AnonymousClassIndexWriter(System.Collections.IList thrown, TestIndexWriter enclosingInstance, MockRAMDirectory mockRAMDir, StandardAnalyzer standardAnalyzer)
-                : base(mockRAMDir, standardAnalyzer)
+                : base(mockRAMDir, standardAnalyzer, MaxFieldLength.UNLIMITED)
             {
                 InitBlock(thrown, enclosingInstance);
             }
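
With the 3.0.3 upgrade the two-argument IndexWriter constructors are gone, so every
call site now names a MaxFieldLength explicitly: LIMITED keeps the old default of
truncating each field after 10,000 terms, while UNLIMITED indexes fields of any
length. Roughly (dir and analyzer stand in for any Directory and Analyzer):

    // matches the old implicit default: fields truncated at 10,000 terms
    IndexWriter limited = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
    limited.Close();

    // no truncation at all
    IndexWriter unlimited = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
    unlimited.Close();
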
@@ -419,15 +414,6 @@ namespace Lucene.Net.Index
                 }
             }
         }
-        public TestIndexWriter(System.String name)
-            : base(name, testWithNewApiData)
-        {
-        }
-
-        public TestIndexWriter()
-            : base("", testWithNewApiData)
-        {
-        }
 
         [Test]
         public virtual void TestDocCount()
@@ -450,11 +436,11 @@ namespace Lucene.Net.Index
             {
                 AddDoc(writer);
             }
-            Assert.AreEqual(100, writer.DocCount());
+            Assert.AreEqual(100, writer.MaxDoc());
             writer.Close();
 
             // delete 40 documents
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, false);
             for (i = 0; i < 40; i++)
             {
                 reader.DeleteDocument(i);
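
IndexReader.Open now takes an explicit readOnly flag: a read-only reader is cheaper
and safe to share, but only a writable reader (readOnly = false, as above) may call
DeleteDocument. In sketch form, for an existing Directory dir:

    IndexReader writable = IndexReader.Open(dir, false);   // deletions allowed
    writable.DeleteDocument(0);
    writable.Close();

    IndexReader readOnly = IndexReader.Open(dir, true);    // search/stats only
    int live = readOnly.NumDocs();
    readOnly.Close();
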
@@ -463,16 +449,16 @@ namespace Lucene.Net.Index
 
             // test doc count before segments are merged/index is optimized
             writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-            Assert.AreEqual(100, writer.DocCount());
+            Assert.AreEqual(100, writer.MaxDoc());
             writer.Close();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(100, reader.MaxDoc());
             Assert.AreEqual(60, reader.NumDocs());
             reader.Close();
 
             // optimize the index and check that the new doc count is correct
-            writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             Assert.AreEqual(100, writer.MaxDoc());
             Assert.AreEqual(60, writer.NumDocs());
             writer.Optimize();
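
DocCount() is replaced by MaxDoc() here, and the distinction from NumDocs() is what
the assertions rely on: MaxDoc() counts every document slot, including deleted ones
that have not yet been merged away, while NumDocs() counts only live documents, hence
100 vs. 60 above until Optimize() reclaims the deletions. For example:

    IndexReader r = IndexReader.Open(dir, true);
    int maxDoc = r.MaxDoc();    // 100: includes the 40 deleted, not-yet-merged docs
    int numDocs = r.NumDocs();  // 60: live documents only
    r.Close();
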
@@ -481,7 +467,7 @@ namespace Lucene.Net.Index
             writer.Close();
 
             // check that the index reader gives the same numbers.
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(60, reader.MaxDoc());
             Assert.AreEqual(60, reader.NumDocs());
             reader.Close();
@@ -511,7 +497,7 @@ namespace Lucene.Net.Index
 
         /*
         Test: make sure when we run out of disk space or hit
-        random IOExceptions in any of the addIndexes(*) calls
+        random IOExceptions in any of the AddIndexesNoOptimize(*) calls
         that 1) index is not corrupt (searcher can open/search
         it) and 2) transactional semantics are followed:
         either all or none of the incoming documents were in
@@ -527,7 +513,7 @@ namespace Lucene.Net.Index
             bool debug = false;
 
             // Build up a bunch of dirs that have indexes which we
-            // will then merge together by calling addIndexes(*):
+            // will then merge together by calling AddIndexesNoOptimize(*):
             Directory[] dirs = new Directory[NUM_DIR];
             long inputDiskUsage = 0;
             for (int i = 0; i < NUM_DIR; i++)
@@ -547,7 +533,7 @@ namespace Lucene.Net.Index
             }
 
             // Now, build a starting index that has START_COUNT docs.  We
-            // will then try to addIndexes into a copy of this:
+            // will then try to AddIndexesNoOptimize into a copy of this:
             RAMDirectory startDir = new RAMDirectory();
             IndexWriter writer2 = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
             for (int j = 0; j < START_COUNT; j++)
@@ -558,7 +544,7 @@ namespace Lucene.Net.Index
 
             // Make sure starting index seems to be working properly:
             Term searchTerm = new Term("content", "aaa");
-            IndexReader reader = IndexReader.Open(startDir);
+            IndexReader reader = IndexReader.Open(startDir, true);
             Assert.AreEqual(57, reader.DocFreq(searchTerm), "first docFreq");
 
             IndexSearcher searcher = new IndexSearcher(reader);
@@ -569,12 +555,12 @@ namespace Lucene.Net.Index
 
             // Iterate with larger and larger amounts of free
             // disk space.  With little free disk space,
-            // addIndexes will certainly run out of space &
+            // AddIndexesNoOptimize will certainly run out of space &
             // fail.  Verify that when this happens, index is
             // not corrupt and index in fact has added no
             // documents.  Then, we increase disk space by 2000
             // bytes each iteration.  At some point there is
-            // enough free disk space and addIndexes should
+            // enough free disk space and AddIndexesNoOptimize should
             // succeed and index should show all documents were
             // added.
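
The transactional contract described above, translated into a rough sketch of what
each iteration checks (writer2, dirs, dir and the START_COUNT/END_COUNT constants are
the ones already used by this test):

    try
    {
        writer2.AddIndexesNoOptimize(dirs);
        writer2.Optimize();
    }
    catch (System.IO.IOException)
    {
        // simulated disk full; the target index must be left in its previous state
    }
    IndexReader r = IndexReader.Open(dir, true);
    int freq = r.DocFreq(new Term("content", "aaa"));
    r.Close();
    // freq must equal either START_COUNT (no documents visible yet) or END_COUNT
    // (all incoming documents added); any other value means the index is corrupt.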
 
@@ -588,7 +574,7 @@ namespace Lucene.Net.Index
                 startDiskUsage += startDir.FileLength(files2[i]);
             }
 
-            for (int iter = 0; iter < 6; iter++)
+            for (int iter = 0; iter < 3; iter++)
             {
 
                 if (debug)
@@ -597,8 +583,7 @@ namespace Lucene.Net.Index
                 // Start with 100 bytes more than we are currently using:
                 long diskFree = diskUsage + 100;
 
-                bool autoCommit = iter % 2 == 0;
-                int method = iter / 2;
+                int method = iter;
 
                 bool success = false;
                 bool done = false;
@@ -606,7 +591,7 @@ namespace Lucene.Net.Index
                 System.String methodName;
                 if (0 == method)
                 {
-                    methodName = "addIndexes(Directory[])";
+                    methodName = "addIndexes(Directory[]) + optimize()";
                 }
                 else if (1 == method)
                 {
@@ -622,7 +607,7 @@ namespace Lucene.Net.Index
 
                     // Make a new dir that will enforce disk usage:
                     MockRAMDirectory dir = new MockRAMDirectory(startDir);
-                    writer2 = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+                    writer2 = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
                     System.IO.IOException err = null;
 
                     MergeScheduler ms = writer2.GetMergeScheduler();
@@ -663,14 +648,14 @@ namespace Lucene.Net.Index
                                 rate = 0.0;
                             }
                             if (debug)
-                                testName = "disk full test " + methodName + " with disk full at " + diskFree + " bytes autoCommit=" + autoCommit;
+                                testName = "disk full test " + methodName + " with disk full at " + diskFree + " bytes";
                         }
                         else
                         {
                             thisDiskFree = 0;
                             rate = 0.0;
                             if (debug)
-                                testName = "disk full test " + methodName + " with unlimited disk space autoCommit=" + autoCommit;
+                                testName = "disk full test " + methodName + " with unlimited disk space";
                         }
 
                         if (debug)
@@ -684,14 +669,15 @@ namespace Lucene.Net.Index
 
                             if (0 == method)
                             {
-                                writer2.AddIndexes(dirs);
+                                writer2.AddIndexesNoOptimize(dirs);
+                                writer2.Optimize();
                             }
                             else if (1 == method)
                             {
                                 IndexReader[] readers = new IndexReader[dirs.Length];
                                 for (int i = 0; i < dirs.Length; i++)
                                 {
-                                    readers[i] = IndexReader.Open(dirs[i]);
+                                    readers[i] = IndexReader.Open(dirs[i], true);
                                 }
                                 try
                                 {
@@ -742,33 +728,6 @@ namespace Lucene.Net.Index
                         // ConcurrentMergeScheduler are done
                         _TestUtil.SyncConcurrentMerges(writer2);
 
-                        if (autoCommit)
-                        {
-
-                            // Whether we succeeded or failed, check that
-                            // all un-referenced files were in fact
-                            // deleted (ie, we did not create garbage).
-                            // Only check this when autoCommit is true:
-                            // when it's false, it's expected that there
-                            // are unreferenced files (ie they won't be
-                            // referenced until the "commit on close").
-                            // Just create a new IndexFileDeleter, have it
-                            // delete unreferenced files, then verify that
-                            // in fact no files were deleted:
-
-                            System.String successStr;
-                            if (success)
-                            {
-                                successStr = "success";
-                            }
-                            else
-                            {
-                                successStr = "IOException";
-                            }
-                            System.String message = methodName + " failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes)";
-                            AssertNoUnreferencedFiles(dir, message);
-                        }
-
                         if (debug)
                         {
                             System.Console.Out.WriteLine("  now test readers");
@@ -780,7 +739,7 @@ namespace Lucene.Net.Index
                         // (transactional semantics):
                         try
                         {
-                            reader = IndexReader.Open(dir);
+                            reader = IndexReader.Open(dir, true);
                         }
                         catch (System.IO.IOException e)
                         {
@@ -790,13 +749,9 @@ namespace Lucene.Net.Index
                         int result = reader.DocFreq(searchTerm);
                         if (success)
                         {
-                            if (autoCommit && result != END_COUNT)
-                            {
-                                Assert.Fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
-                            }
-                            else if (!autoCommit && result != START_COUNT)
+                            if (result != START_COUNT)
                             {
-                                Assert.Fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " [autoCommit = false]");
+                                Assert.Fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT);
                             }
                         }
                         else
@@ -896,12 +851,11 @@ namespace Lucene.Net.Index
 
             bool debug = false;
 
-            for (int pass = 0; pass < 3; pass++)
+            for (int pass = 0; pass < 2; pass++)
             {
                 if (debug)
                     System.Console.Out.WriteLine("TEST: pass=" + pass);
-                bool autoCommit = pass == 0;
-                bool doAbort = pass == 2;
+                bool doAbort = pass == 1;
                 long diskFree = 200;
                 while (true)
                 {
@@ -909,7 +863,7 @@ namespace Lucene.Net.Index
                         System.Console.Out.WriteLine("TEST: cycle: diskFree=" + diskFree);
                     MockRAMDirectory dir = new MockRAMDirectory();
                     dir.SetMaxSizeInBytes(diskFree);
-                    IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 
                     MergeScheduler ms = writer.GetMergeScheduler();
                     if (ms is ConcurrentMergeScheduler)
@@ -940,7 +894,7 @@ namespace Lucene.Net.Index
                     {
                         if (doAbort)
                         {
-                            writer.Abort();
+                            writer.Rollback();
                         }
                         else
                         {
@@ -962,10 +916,10 @@ namespace Lucene.Net.Index
 
                         _TestUtil.SyncConcurrentMerges(ms);
 
-                        AssertNoUnreferencedFiles(dir, "after disk full during addDocument with autoCommit=" + autoCommit);
+                        AssertNoUnreferencedFiles(dir, "after disk full during addDocument");
 
                         // Make sure reader can open the index:
-                        IndexReader.Open(dir).Close();
+                        IndexReader.Open(dir, true).Close();
 
                         dir.Close();
 
@@ -1004,7 +958,7 @@ namespace Lucene.Net.Index
         public virtual void TestWickedLongTerm()
         {
             RAMDirectory dir = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
             char[] chars = new char[DocumentsWriter.CHAR_BLOCK_SIZE_ForNUnit - 1];
             CollectionsHelper.Fill(chars, 'x');
@@ -1023,7 +977,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
 
             // Make sure all terms < max size were indexed
             Assert.AreEqual(2, reader.DocFreq(new Term("content", "abc")));
@@ -1048,12 +1002,12 @@ namespace Lucene.Net.Index
             // maximum length term, and search on that term:
             doc = new Document();
             doc.Add(new Field("content", bigTerm, Field.Store.NO, Field.Index.ANALYZED));
-            StandardAnalyzer sa = new StandardAnalyzer();
+            StandardAnalyzer sa = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
             sa.SetMaxTokenLength(100000);
             writer = new IndexWriter(dir, sa, IndexWriter.MaxFieldLength.LIMITED);
             writer.AddDocument(doc);
             writer.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(1, reader.DocFreq(new Term("content", bigTerm)));
             reader.Close();
 
@@ -1063,7 +1017,6 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestOptimizeMaxNumSegments()
         {
-
             MockRAMDirectory dir = new MockRAMDirectory();
 
             Document doc = new Document();
@@ -1205,7 +1158,7 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestCreateWithReader()
         {
-            System.IO.FileInfo indexDir = _TestUtil.GetTempDir("lucenetestindexwriter");
+            System.IO.DirectoryInfo indexDir = _TestUtil.GetTempDir("lucenetestindexwriter");
 
             try
             {
@@ -1217,17 +1170,17 @@ namespace Lucene.Net.Index
                 writer.Close();
 
                 // now open reader:
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
 
                 // now open index for create:
                 writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                Assert.AreEqual(writer.DocCount(), 0, "should be zero documents");
+                Assert.AreEqual(writer.MaxDoc(), 0, "should be zero documents");
                 AddDoc(writer);
                 writer.Close();
 
                 Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-                IndexReader reader2 = IndexReader.Open(dir);
+                IndexReader reader2 = IndexReader.Open(dir, true);
                 Assert.AreEqual(reader2.NumDocs(), 1, "should be one document");
                 reader.Close();
                 reader2.Close();
@@ -1238,78 +1191,6 @@ namespace Lucene.Net.Index
             }
         }
 
-
-        // Same test as above, but use IndexWriter constructor
-        // that takes File:
-        [Test]
-        public virtual void TestCreateWithReader2()
-        {
-            System.IO.FileInfo indexDir = _TestUtil.GetTempDir("lucenetestindexwriter");
-            try
-            {
-                // add one document & close writer
-                IndexWriter writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                AddDoc(writer);
-                writer.Close();
-
-                // now open reader:
-                IndexReader reader = IndexReader.Open(indexDir);
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-
-                // now open index for create:
-                writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                Assert.AreEqual(writer.DocCount(), 0, "should be zero documents");
-                AddDoc(writer);
-                writer.Close();
-
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-                IndexReader reader2 = IndexReader.Open(indexDir);
-                Assert.AreEqual(reader2.NumDocs(), 1, "should be one document");
-                reader.Close();
-                reader2.Close();
-            }
-            finally
-            {
-                RmDir(indexDir);
-            }
-        }
-
-        // Same test as above, but use IndexWriter constructor
-        // that takes String:
-        [Test]
-        public virtual void TestCreateWithReader3()
-        {
-            System.IO.FileInfo dirName = _TestUtil.GetTempDir("lucenetestindexwriter");
-            try
-            {
-
-                // add one document & close writer
-                IndexWriter writer = new IndexWriter(dirName, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                AddDoc(writer);
-                writer.Close();
-
-                // now open reader:
-                IndexReader reader = IndexReader.Open(dirName);
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-
-                // now open index for create:
-                writer = new IndexWriter(dirName, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                Assert.AreEqual(writer.DocCount(), 0, "should be zero documents");
-                AddDoc(writer);
-                writer.Close();
-
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-                IndexReader reader2 = IndexReader.Open(dirName);
-                Assert.AreEqual(reader2.NumDocs(), 1, "should be one document");
-                reader.Close();
-                reader2.Close();
-            }
-            finally
-            {
-                RmDir(dirName);
-            }
-        }
-
         // Simulate a writer that crashed while writing segments
         // file: make sure we can still open the index (ie,
         // gracefully fallback to the previous segments file),
@@ -1353,7 +1234,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
             }
             catch (System.Exception e)
             {
@@ -1421,7 +1302,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 Assert.Fail("reader did not hit IOException on opening a corrupt index");
             }
             catch (System.Exception e)
@@ -1494,7 +1375,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 Assert.Fail("reader did not hit IOException on opening a corrupt index");
             }
             catch (System.Exception e)
@@ -1507,11 +1388,10 @@ namespace Lucene.Net.Index
         }
 
         /*
-        * Simple test for "commit on close": open writer with
-        * autoCommit=false, so it will only commit on close,
-        * then add a bunch of docs, making sure reader does not
-        * see these docs until writer is closed.
-        */
+         * Simple test for "commit on close": open writer then 
+         * add a bunch of docs, making sure reader does not
+         * see these docs until writer is closed.
+         */
         [Test]
         public virtual void TestCommitOnClose()
         {
@@ -1524,12 +1404,12 @@ namespace Lucene.Net.Index
             writer.Close();
 
             Term searchTerm = new Term("content", "aaa");
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(14, hits.Length, "first number of hits");
             searcher.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
 
             writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
             for (int i = 0; i < 3; i++)
@@ -1538,9 +1418,9 @@ namespace Lucene.Net.Index
                 {
                     AddDoc(writer);
                 }
-                searcher = new IndexSearcher(dir);
+                searcher = new IndexSearcher(dir, true);
                 hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer with autoCommit disabled");
+                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
                 searcher.Close();
                 Assert.IsTrue(reader.IsCurrent(), "reader should have still been current");
             }
@@ -1549,16 +1429,15 @@ namespace Lucene.Net.Index
             writer.Close();
             Assert.IsFalse(reader.IsCurrent(), "reader should not be current now");
 
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(47, hits.Length, "reader did not see changes after writer was closed");
             searcher.Close();
         }
 
         /*
-        * Simple test for "commit on close": open writer with
-        * autoCommit=false, so it will only commit on close,
-        * then add a bunch of docs, making sure reader does not
+        * Simple test for "commit on close": open writer then 
+        * add a bunch of docs, making sure reader does not
         * see them until writer has closed.  Then instead of
         * closing the writer, call abort and verify reader sees
         * nothing was added.  Then verify we can open the index
@@ -1577,7 +1456,7 @@ namespace Lucene.Net.Index
             writer.Close();
 
             Term searchTerm = new Term("content", "aaa");
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(14, hits.Length, "first number of hits");
             searcher.Close();
@@ -1591,17 +1470,17 @@ namespace Lucene.Net.Index
             // Delete all docs:
             writer.DeleteDocuments(searchTerm);
 
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-            Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer with autoCommit disabled");
+            Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
             searcher.Close();
 
             // Now, close the writer:
-            writer.Abort();
+            writer.Rollback();
 
-            AssertNoUnreferencedFiles(dir, "unreferenced files remain after abort()");
+            AssertNoUnreferencedFiles(dir, "unreferenced files remain after Rollback()");
 
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(14, hits.Length, "saw changes after writer.abort");
             searcher.Close();
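
Abort() has become Rollback() in this API generation: it closes the writer and
discards everything buffered or flushed since the last commit, which is why the
searcher above still finds all 14 documents after the deletions were rolled back.
Schematically:

    writer.DeleteDocuments(searchTerm);   // buffered, never committed
    writer.Rollback();                    // closes the writer, dropping the deletes
    // a reader or searcher opened now still sees the last committed state
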
@@ -1621,14 +1500,14 @@ namespace Lucene.Net.Index
                 {
                     AddDoc(writer);
                 }
-                searcher = new IndexSearcher(dir);
+                searcher = new IndexSearcher(dir, false);
                 hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer with autoCommit disabled");
+                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
                 searcher.Close();
             }
 
             writer.Close();
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(218, hits.Length, "didn't see changes after close");
             searcher.Close();
@@ -1668,7 +1547,7 @@ namespace Lucene.Net.Index
             writer.Optimize();
             writer.Close();
 
-            IndexReader.Open(dir).Close();
+            IndexReader.Open(dir, true).Close();
 
             long endDiskUsage = dir.GetMaxUsedSizeInBytes();
 
@@ -1678,14 +1557,14 @@ namespace Lucene.Net.Index
             // and it doesn't delete intermediate segments then it
             // will exceed this 105X:
             // System.out.println("start " + startDiskUsage + "; mid " + midDiskUsage + ";end " + endDiskUsage);
-            Assert.IsTrue(midDiskUsage < 150 * startDiskUsage, "writer used too much space while adding documents when autoCommit=false: mid=" + midDiskUsage + " start=" + startDiskUsage + " end=" + endDiskUsage);
-            Assert.IsTrue(endDiskUsage < 150 * startDiskUsage, "writer used too much space after close when autoCommit=false endDiskUsage=" + endDiskUsage + " startDiskUsage=" + startDiskUsage);
+            Assert.IsTrue(midDiskUsage < 150 * startDiskUsage, "writer used too much space while adding documents: mid=" + midDiskUsage + " start=" + startDiskUsage + " end=" + endDiskUsage);
+            Assert.IsTrue(endDiskUsage < 150 * startDiskUsage, "writer used too much space after close: endDiskUsage=" + endDiskUsage + " startDiskUsage=" + startDiskUsage);
         }
 
 
         /*
         * Verify that calling optimize when writer is open for
-        * "commit on close" works correctly both for abort()
+        * "commit on close" works correctly both for Rollback()
         * and close().
         */
         [Test]
@@ -1704,7 +1583,7 @@ namespace Lucene.Net.Index
             writer.Optimize();
 
             // Open a reader before closing (commiting) the writer:
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
 
             // Reader should see index as unoptimized at this
             // point:
@@ -1712,11 +1591,11 @@ namespace Lucene.Net.Index
             reader.Close();
 
             // Abort the writer:
-            writer.Abort();
+            writer.Rollback();
             AssertNoUnreferencedFiles(dir, "aborted writer after optimize");
 
             // Open a reader after aborting writer:
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
 
             // Reader should still see index as unoptimized:
             Assert.IsFalse(reader.IsOptimized(), "Reader incorrectly sees that the index is optimized");
@@ -1728,7 +1607,7 @@ namespace Lucene.Net.Index
             AssertNoUnreferencedFiles(dir, "aborted writer after optimize");
 
             // Open a reader after aborting writer:
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
 
             // Reader should still see index as unoptimized:
             Assert.IsTrue(reader.IsOptimized(), "Reader incorrectly sees that the index is unoptimized");
@@ -1740,19 +1619,19 @@ namespace Lucene.Net.Index
         {
             RAMDirectory dir = new RAMDirectory();
             IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.Flush();
+            writer.Commit();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.MaxDoc());
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
 
             writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
-            writer.Flush();
+            writer.Commit();
             writer.Close();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.MaxDoc());
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
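
Flush() calls are replaced by Commit(), which flushes any buffered documents and
deletes, syncs the files and writes a new segments_N generation, making the changes
visible to readers opened afterwards. A minimal sketch (dir and doc as elsewhere in
this file):

    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
    writer.AddDocument(doc);
    writer.Commit();                              // durable and visible from here on
    IndexReader r = IndexReader.Open(dir, true);  // sees the committed document
    r.Close();
    writer.Close();
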
@@ -1777,7 +1656,7 @@ namespace Lucene.Net.Index
             }
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(100, reader.MaxDoc());
             Assert.AreEqual(100, reader.NumDocs());
             for (int j = 0; j < 100; j++)
@@ -2022,7 +1901,7 @@ namespace Lucene.Net.Index
             }
             writer.Close();
 
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("field", "aaa")), null, 1000).ScoreDocs;
             Assert.AreEqual(300, hits.Length);
             searcher.Close();
@@ -2052,7 +1931,7 @@ namespace Lucene.Net.Index
 
             Term searchTerm = new Term("field", "aaa");
 
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(10, hits.Length);
             searcher.Close();
@@ -2072,12 +1951,12 @@ namespace Lucene.Net.Index
                 writer.AddDocument(doc);
             }
             writer.Close();
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(27, hits.Length);
             searcher.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             reader.Close();
 
             dir.Close();
@@ -2103,7 +1982,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(1, reader.MaxDoc());
             Assert.AreEqual(1, reader.NumDocs());
             Term t = new Term("field", "a");
@@ -2133,7 +2012,7 @@ namespace Lucene.Net.Index
             }
             writer.Close();
             Term searchTerm = new Term("content", "aaa");
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(100, hits.Length, "did not get right number of hits");
             writer.Close();
@@ -2173,11 +2052,11 @@ namespace Lucene.Net.Index
             Document doc = new Document();
             doc.Add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
             writer.AddDocument(doc);
-            writer.Flush();
+            writer.Commit();
             writer.AddDocument(new Document());
             writer.Close();
             _TestUtil.CheckIndex(dir);
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(2, reader.NumDocs());
         }
 
@@ -2204,7 +2083,7 @@ namespace Lucene.Net.Index
                 if (0 == pass)
                 {
                     writer.Close();
-                    IndexReader reader = IndexReader.Open(dir);
+                    IndexReader reader = IndexReader.Open(dir, false);
                     Assert.IsTrue(reader.IsOptimized());
                     reader.Close();
                 }
@@ -2216,7 +2095,7 @@ namespace Lucene.Net.Index
                     writer.AddDocument(doc);
                     writer.Close();
 
-                    IndexReader reader = IndexReader.Open(dir);
+                    IndexReader reader = IndexReader.Open(dir, true);
                     Assert.IsTrue(!reader.IsOptimized());
                     reader.Close();
 
@@ -2229,7 +2108,8 @@ namespace Lucene.Net.Index
             dir.Close();
         }
 
-        private void RmDir(System.IO.FileInfo dir)
+        // TODO: Could be less complicated.
+        private void RmDir(System.IO.DirectoryInfo dir)
         {
             System.IO.FileInfo[] files = FileSupport.GetFiles(dir);
             if (files != null)
@@ -2279,7 +2159,7 @@ namespace Lucene.Net.Index
         public virtual void TestBadSegment()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
             Document document = new Document();
             document.Add(new Field("tvtest", "", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
@@ -2293,7 +2173,7 @@ namespace Lucene.Net.Index
         public virtual void TestNoTermVectorAfterTermVector()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
             Document document = new Document();
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
@@ -2301,12 +2181,12 @@ namespace Lucene.Net.Index
             document.Add(new Field("tvtest", "x y z", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
             iw.AddDocument(document);
             // Make first segment
-            iw.Flush();
+            iw.Commit();
 
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
             // Make 2nd segment
-            iw.Flush();
+            iw.Commit();
 
             iw.Optimize();
             iw.Close();
@@ -2318,24 +2198,24 @@ namespace Lucene.Net.Index
         public virtual void TestNoTermVectorAfterTermVectorMerge()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
             Document document = new Document();
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
-            iw.Flush();
+            iw.Commit();
 
             document = new Document();
             document.Add(new Field("tvtest", "x y z", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
             iw.AddDocument(document);
             // Make first segment
-            iw.Flush();
+            iw.Commit();
 
             iw.Optimize();
 
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
             // Make 2nd segment
-            iw.Flush();
+            iw.Commit();
             iw.Optimize();
 
             iw.Close();
@@ -2350,7 +2230,7 @@ namespace Lucene.Net.Index
             try
             {
                 MockRAMDirectory dir = new MockRAMDirectory();
-                IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+                IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
                 Document document = new Document();
                 document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
                 iw.SetMaxBufferedDocs(2);
@@ -2414,7 +2294,7 @@ namespace Lucene.Net.Index
         public virtual void TestSetMaxMergeDocs()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
             iw.SetMergeScheduler(new MyMergeScheduler(this));
             iw.SetMaxMergeDocs(20);
             iw.SetMaxBufferedDocs(2);
@@ -2456,7 +2336,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
 
             writer.Close();
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Term t = new Term("content", "aa");
             Assert.AreEqual(reader.DocFreq(t), 3);
 
@@ -2546,7 +2426,7 @@ namespace Lucene.Net.Index
             }
             Assert.IsTrue(hitError);
             writer.Close();
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(198, reader.DocFreq(new Term("content", "aa")));
             reader.Close();
         }
@@ -2624,7 +2504,7 @@ namespace Lucene.Net.Index
                 }
                 writer.Close();
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 int expected = 3 + (1 - i) * 2;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2652,7 +2532,7 @@ namespace Lucene.Net.Index
                 writer.Optimize();
                 writer.Close();
 
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 expected = 19 + (1 - i) * 2;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2704,7 +2584,7 @@ namespace Lucene.Net.Index
                     writer.Close();
                 }
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 int expected = (3 + (1 - i) * 2) * NUM_THREAD * NUM_ITER;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2732,7 +2612,7 @@ namespace Lucene.Net.Index
                 writer2.Optimize();
                 writer2.Close();
 
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 expected += 17 - NUM_THREAD * NUM_ITER;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2792,7 +2672,7 @@ namespace Lucene.Net.Index
                     writer.AddDocument(doc);
 
                 writer.Close();
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, false);
                 reader.DeleteDocument(delID++);
                 reader.Close();
 
@@ -2815,17 +2695,16 @@ namespace Lucene.Net.Index
             Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
             doc.Add(idField);
 
-            for (int pass = 0; pass < 3; pass++)
+            for (int pass = 0; pass < 2; pass++)
             {
-                bool autoCommit = pass % 2 == 0;
-                IndexWriter writer = new IndexWriter(directory, autoCommit, new WhitespaceAnalyzer(), true);
+                IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 
-                //System.out.println("TEST: pass=" + pass + " ac=" + autoCommit + " cms=" + (pass >= 2));
+                //System.out.println("TEST: pass=" + pass + " cms=" + (pass >= 2));
                 for (int iter = 0; iter < 10; iter++)
                 {
                     //System.out.println("TEST: iter=" + iter);
                     MergeScheduler ms;
-                    if (pass >= 2)
+                    if (pass == 1)
                         ms = new ConcurrentMergeScheduler();
                     else
                         ms = new SerialMergeScheduler();
@@ -2866,11 +2745,11 @@ namespace Lucene.Net.Index
                     t1.Join();
 
                     // Make sure reader can read
-                    IndexReader reader = IndexReader.Open(directory);
+                    IndexReader reader = IndexReader.Open(directory, true);
                     reader.Close();
 
                     // Reopen
-                    writer = new IndexWriter(directory, autoCommit, new WhitespaceAnalyzer(), false);
+                    writer = new IndexWriter(directory, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
                 }
                 writer.Close();
             }
@@ -2939,8 +2818,7 @@ namespace Lucene.Net.Index
                             }
                             catch (System.Threading.ThreadInterruptedException ie)
                             {
-                                ThreadClass.Current().Interrupt();
-                                throw new System.SystemException("", ie);
+                                throw;
                             }
                             if (fullCount++ >= 5)
                                 break;
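
The wrap-and-re-interrupt idiom from the Java original is dropped because .NET
already clears a thread's interrupted status when ThreadInterruptedException is
thrown, so a bare rethrow is enough and keeps the original stack trace:

    try
    {
        System.Threading.Thread.Sleep(1);
    }
    catch (System.Threading.ThreadInterruptedException)
    {
        // no re-interrupt needed on .NET; "throw;" preserves the stack trace
        throw;
    }
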
@@ -3027,7 +2905,7 @@ namespace Lucene.Net.Index
                 }
 
                 // Quick test to make sure index is not corrupt:
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 TermDocs tdocs = reader.TermDocs(new Term("field", "aaa"));
                 int count = 0;
                 while (tdocs.Next())
@@ -3092,7 +2970,7 @@ namespace Lucene.Net.Index
             for (int iter = 0; iter < 10; iter++)
             {
                 MockRAMDirectory dir = new MockRAMDirectory();
-                IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
                 ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
                 // We expect disk full exceptions in the merge threads
                 cms.SetSuppressExceptions();
@@ -3164,7 +3042,7 @@ namespace Lucene.Net.Index
         {
             MockRAMDirectory dir = new MockRAMDirectory();
 
-            IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             writer.SetMaxBufferedDocs(2);
             Document doc = new Document();
             doc.Add(new Field("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -3178,6 +3056,7 @@ namespace Lucene.Net.Index
             {
                 writer.AddDocument(doc);
                 writer.AddDocument(doc);
+                writer.Commit();
                 Assert.Fail("did not hit exception");
             }
             catch (System.IO.IOException ioe)
@@ -3239,7 +3118,7 @@ namespace Lucene.Net.Index
 
                 if (success)
                 {
-                    IndexReader reader = IndexReader.Open(dir);
+                    IndexReader reader = IndexReader.Open(dir, true);
                     for (int j = 0; j < reader.MaxDoc(); j++)
                     {
                         if (!reader.IsDeleted(j))
@@ -3256,7 +3135,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), is OK:
+        // IOException during Rollback(), is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbort()
         {
@@ -3264,7 +3143,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), is OK:
+        // IOException during Rollback(), is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbortOnlyOnce()
         {
@@ -3272,7 +3151,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), with multiple threads, is OK:
+        // IOException during Rollback(), with multiple threads, is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbortWithThreads()
         {
@@ -3280,7 +3159,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), with multiple threads, is OK:
+        // IOException during Rollback(), with multiple threads, is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbortWithThreadsOnlyOnce()
         {
@@ -3415,7 +3294,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Term t = new Term("field", "x");
             Assert.AreEqual(1, reader.DocFreq(t));
             reader.Close();
@@ -3456,7 +3335,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
             }
             catch (System.IO.IOException e)
             {
@@ -3479,7 +3358,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < 23; i++)
                 AddDoc(writer);
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             writer.Commit();
             IndexReader reader2 = reader.Reopen();
@@ -3491,12 +3370,12 @@ namespace Lucene.Net.Index
                 AddDoc(writer);
             Assert.AreEqual(23, reader2.NumDocs());
             reader2.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
             writer.Commit();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(40, reader.NumDocs());
             reader.Close();
             writer.Close();
@@ -3534,25 +3413,36 @@ namespace Lucene.Net.Index
             FailOnlyInSync failure = new FailOnlyInSync();
             dir.FailOn(failure);
 
-            IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             failure.SetDoFail();
 
             ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
-            // We expect sync exceptions in the merge threads
-            cms.SetSuppressExceptions();
             writer.SetMergeScheduler(cms);
             writer.SetMaxBufferedDocs(2);
             writer.SetMergeFactor(5);
 
             for (int i = 0; i < 23; i++)
+            {
                 AddDoc(writer);
+                if ((i - 1) % 2 == 0)
+                {
+                    try
+                    {
+                        writer.Commit();
+                    }
+                    catch (System.IO.IOException ioe)
+                    {
+                        //expected
+                    }
+                }
+            }
 
             cms.Sync();
             Assert.IsTrue(failure.didFail);
             failure.ClearDoFail();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
             dir.Close();
@@ -3567,7 +3457,8 @@ namespace Lucene.Net.Index
             for (int iter = 0; iter < 4; iter++)
             {
                 bool autoCommit = 1 == iter / 2;
-                IndexWriter writer = new IndexWriter(dir, autoCommit, new StandardAnalyzer());
+                IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT),
+                                                     IndexWriter.MaxFieldLength.UNLIMITED);
                 writer.SetMaxBufferedDocs(2);
                 writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
                 writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3589,7 +3480,7 @@ namespace Lucene.Net.Index
                 writer.Optimize();
                 writer.Close();
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 for (int i = 0; i < reader.NumDocs(); i++)
                 {
                     reader.Document(i);
@@ -3597,14 +3488,15 @@ namespace Lucene.Net.Index
                 }
                 reader.Close();
 
-                writer = new IndexWriter(dir, autoCommit, new StandardAnalyzer());
+                writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
                 writer.SetMaxBufferedDocs(2);
                 writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
                 writer.SetMergeScheduler(new SerialMergeScheduler());
                 writer.SetMergePolicy(new LogDocMergePolicy(writer));
 
                 Directory[] indexDirs = new Directory[] { new MockRAMDirectory(dir) };
-                writer.AddIndexes(indexDirs);
+                writer.AddIndexesNoOptimize(indexDirs);
+                writer.Optimize();
                 writer.Close();
             }
             dir.Close();
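
Two more recurring substitutions appear in the hunk above: analyzers are now constructed with an explicit Util.Version, and the deprecated AddIndexes(Directory[]) call is replaced by AddIndexesNoOptimize followed by an explicit Optimize(). A minimal sketch combining both, assuming the 2.9/3.0-era API shown in the diff (the helper name is illustrative, and the note about the old AddIndexes behavior reflects the upstream Javadoc rather than this patch):

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class AddIndexesSketch
    {
        public static void CopyInto(Directory target, Directory[] sources)
        {
            IndexWriter writer = new IndexWriter(
                target,
                new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT),
                IndexWriter.MaxFieldLength.UNLIMITED);

            writer.AddIndexesNoOptimize(sources); // copies segments without forcing a full merge
            writer.Optimize();                    // merge down to one segment, which the deprecated
                                                  // AddIndexes(Directory[]) used to do implicitly
            writer.Close();
        }
    }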
@@ -3615,10 +3507,9 @@ namespace Lucene.Net.Index
         public virtual void TestTermVectorCorruption2()
         {
             Directory dir = new MockRAMDirectory();
-            for (int iter = 0; iter < 4; iter++)
+            for (int iter = 0; iter < 2; iter++)
             {
-                bool autoCommit = 1 == iter / 2;
-                IndexWriter writer = new IndexWriter(dir, autoCommit, new StandardAnalyzer());
+                IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
                 writer.SetMaxBufferedDocs(2);
                 writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
                 writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3639,7 +3530,7 @@ namespace Lucene.Net.Index
                 writer.Optimize();
                 writer.Close();
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 Assert.IsTrue(reader.GetTermFreqVectors(0) == null);
                 Assert.IsTrue(reader.GetTermFreqVectors(1) == null);
                 Assert.IsTrue(reader.GetTermFreqVectors(2) != null);
@@ -3653,7 +3544,7 @@ namespace Lucene.Net.Index
         public virtual void TestTermVectorCorruption3()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
             writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3670,7 +3561,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
             writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3681,7 +3572,7 @@ namespace Lucene.Net.Index
             writer.Optimize();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             for (int i = 0; i < 10; i++)
             {
                 reader.GetTermFreqVectors(i);
@@ -3708,7 +3599,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Term t = new Term("field", "x");
             Assert.AreEqual(1, reader.DocFreq(t));
             reader.Close();
@@ -3721,7 +3612,7 @@ namespace Lucene.Net.Index
         public virtual void TestExpungeDeletes()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
 
@@ -3736,7 +3627,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, false); // not read-only: DeleteDocument is called below
             Assert.AreEqual(10, ir.MaxDoc());
             Assert.AreEqual(10, ir.NumDocs());
             ir.DeleteDocument(0);
@@ -3744,7 +3635,7 @@ namespace Lucene.Net.Index
             Assert.AreEqual(8, ir.NumDocs());
             ir.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             Assert.AreEqual(8, writer.NumDocs());
             Assert.AreEqual(10, writer.MaxDoc());
             writer.ExpungeDeletes();
@@ -3762,7 +3653,7 @@ namespace Lucene.Net.Index
         public virtual void TestExpungeDeletes2()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetMergeFactor(50);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -3778,7 +3669,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, false); // not read-only: the loop below deletes through this reader
             Assert.AreEqual(98, ir.MaxDoc());
             Assert.AreEqual(98, ir.NumDocs());
             for (int i = 0; i < 98; i += 2)
@@ -3786,12 +3677,12 @@ namespace Lucene.Net.Index
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMergeFactor(3);
             Assert.AreEqual(49, writer.NumDocs());
             writer.ExpungeDeletes();
             writer.Close();
-            ir = IndexReader.Open(dir);
+            ir = IndexReader.Open(dir, true);
             Assert.AreEqual(49, ir.MaxDoc());
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
@@ -3804,7 +3695,7 @@ namespace Lucene.Net.Index
         public virtual void TestExpungeDeletes3()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetMergeFactor(50);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -3820,7 +3711,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, false);
             Assert.AreEqual(98, ir.MaxDoc());
             Assert.AreEqual(98, ir.NumDocs());
             for (int i = 0; i < 98; i += 2)
@@ -3828,12 +3719,12 @@ namespace Lucene.Net.Index
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             // Force many merges to happen
             writer.SetMergeFactor(3);
             writer.ExpungeDeletes(false);
             writer.Close();
-            ir = IndexReader.Open(dir);
+            ir = IndexReader.Open(dir, true);
             Assert.AreEqual(49, ir.MaxDoc());
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
@@ -3845,7 +3736,7 @@ namespace Lucene.Net.Index
         public virtual void TestEmptyFieldName()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             Document doc = new Document();
             doc.Add(new Field("", "a b c", Field.Store.NO, Field.Index.ANALYZED));
             writer.AddDocument(doc);
@@ -4064,7 +3955,7 @@ namespace Lucene.Net.Index
             Assert.IsTrue(w.afterWasCalled);
             w.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, true);
             Assert.AreEqual(1, ir.MaxDoc());
             Assert.AreEqual(0, ir.NumDocs());
             ir.Close();
@@ -4137,7 +4028,7 @@ namespace Lucene.Net.Index
                 // Expected
             }
             Assert.IsTrue(failure.fail1 && failure.fail2);
-            w.Abort();
+            w.Rollback();
             dir.Close();
         }
 
@@ -4157,7 +4048,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, true);
             Document doc2 = ir.Document(0);
             for (int i = 0; i < count; i++)
             {
@@ -4380,7 +4271,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Commit();
 
-            IndexSearcher s = new IndexSearcher(dir);
+            IndexSearcher s = new IndexSearcher(dir, false);
             PhraseQuery pq = new PhraseQuery();
             pq.Add(new Term("field", "a"));
             pq.Add(new Term("field", "b"));
@@ -4415,12 +4306,12 @@ namespace Lucene.Net.Index
             for (int i = 0; i < 23; i++)
                 AddDoc(writer);
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
 
             writer.PrepareCommit();
 
-            IndexReader reader2 = IndexReader.Open(dir);
+            IndexReader reader2 = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader2.NumDocs());
 
             writer.Commit();
@@ -4437,18 +4328,18 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual(23, reader3.NumDocs());
             reader3.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
 
             writer.PrepareCommit();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
 
             writer.Commit();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(40, reader.NumDocs());
             reader.Close();
             writer.Close();
@@ -4470,12 +4361,12 @@ namespace Lucene.Net.Index
             for (int i = 0; i < 23; i++)
                 AddDoc(writer);
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
 
             writer.PrepareCommit();
 
-            IndexReader reader2 = IndexReader.Open(dir);
+            IndexReader reader2 = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader2.NumDocs());
 
             writer.Rollback();
@@ -4493,18 +4384,18 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual(0, reader3.NumDocs());
             reader3.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
 
             writer.PrepareCommit();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
 
             writer.Commit();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(17, reader.NumDocs());
             reader.Close();
             writer.Close();
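
The two tests above exercise IndexWriter's two-phase commit: PrepareCommit() writes and syncs the pending segments but keeps them invisible to newly opened readers, Commit() publishes them, and Rollback() discards them. A minimal sketch of the pattern, assuming the API used in these tests (the helper name and field text are illustrative; that Rollback() also closes the writer follows the upstream Javadoc rather than anything shown in this hunk):

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class TwoPhaseCommitSketch
    {
        public static void IndexAtomically(Directory dir, bool keepChanges)
        {
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(),
                                                 IndexWriter.MaxFieldLength.UNLIMITED);
            Document doc = new Document();
            doc.Add(new Field("content", "aaa bbb ccc", Field.Store.NO, Field.Index.ANALYZED));
            writer.AddDocument(doc);

            writer.PrepareCommit();      // phase 1: flush and sync; new readers still see the old commit
            if (keepChanges)
            {
                writer.Commit();         // phase 2: publish the prepared segments
                writer.Close();
            }
            else
            {
                writer.Rollback();       // drop everything since the last commit; also closes the writer
            }
        }
    }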
@@ -4522,7 +4413,7 @@ namespace Lucene.Net.Index
             writer.Commit();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
             dir.Close();
@@ -4608,7 +4499,7 @@ namespace Lucene.Net.Index
 
                 readers = new IndexReader[NUM_COPY];
                 for (int i = 0; i < NUM_COPY; i++)
-                    readers[i] = IndexReader.Open(dir);
+                    readers[i] = IndexReader.Open(dir, true);
             }
 
             internal virtual void LaunchThreads(int numIter)
@@ -4682,7 +4573,8 @@ namespace Lucene.Net.Index
                 {
 
                     case 0:
-                        writer2.AddIndexes(dirs);
+                        writer2.AddIndexesNoOptimize(dirs);
+                        writer2.Optimize();
                         break;
 
                     case 1:
@@ -4724,7 +4616,7 @@ namespace Lucene.Net.Index
 
             _TestUtil.CheckIndex(c.dir2);
 
-            IndexReader reader = IndexReader.Open(c.dir2);
+            IndexReader reader = IndexReader.Open(c.dir2, true);
             Assert.AreEqual(100 + NUM_COPY * (3 * NUM_ITER / 4) * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_THREADS * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_INIT_DOCS, reader.NumDocs());
             reader.Close();
 
@@ -4814,7 +4706,8 @@ namespace Lucene.Net.Index
                 {
 
                     case 0:
-                        writer2.AddIndexes(dirs);
+                        writer2.AddIndexesNoOptimize(dirs);
+                        writer2.Optimize();
                         break;
 
                     case 1:
@@ -4991,7 +4884,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, true);
             doc = ir.Document(0);
             f = doc.GetField("binary");
             b = f.GetBinaryValue();
@@ -5002,63 +4895,6 @@ namespace Lucene.Net.Index
             dir.Close();
         }
 
-        // LUCENE-1374
-        [Test]
-        public virtual void TestMergeCompressedFields()
-        {
-            System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "mergecompressedfields"));
-            Directory dir = FSDirectory.Open(indexDir);
-            try
-            {
-                for (int i = 0; i < 5; i++)
-                {
-                    // Must make a new writer & doc each time, w/
-                    // different fields, so bulk merge of stored fields
-                    // cannot run:
-                    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), i == 0, IndexWriter.MaxFieldLength.UNLIMITED);
-                    try
-                    {
-                        w.SetMergeFactor(5);
-                        w.SetMergeScheduler(new SerialMergeScheduler());
-                        Document doc = new Document();
-                        doc.Add(new Field("test1", "this is some data that will be compressed this this this", Field.Store.COMPRESS, Field.Index.NO));
-                        doc.Add(new Field("test2", new byte[20], Field.Store.COMPRESS));
-                        doc.Add(new Field("field" + i, "random field", Field.Store.NO, Field.Index.ANALYZED));
-                        w.AddDocument(doc);
-                    }
-                    finally
-                    {
-                        w.Close();
-                    }
-                }
-
-                byte[] cmp = new byte[20];
-
-                IndexReader r = IndexReader.Open(dir);
-                try
-                {
-                    for (int i = 0; i < 5; i++)
-                    {
-                        Document doc = r.Document(i);
-                        Assert.AreEqual(doc.GetField("test1").StringValue(), "this is some data that will be compressed this this this");
-                        byte[] b = doc.GetField("test2").BinaryValue();
-                        Assert.AreEqual(b.Length, cmp.Length);
-                        for (int j = 0; j < b.Length; j++)
-                            Assert.AreEqual(b[j], cmp[j]);
-                    }
-                }
-                finally
-                {
-                    r.Close();
-                }
-            }
-            finally
-            {
-                dir.Close();
-                _TestUtil.RmDir(indexDir);
-            }
-        }
-
         // LUCENE-1382
         [Test]
         public virtual void TestCommitUserData()
@@ -5072,7 +4908,7 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual(0, IndexReader.GetCommitUserData(dir).Count);
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             // commit(Map) never called for this index
             Assert.AreEqual(0, r.GetCommitUserData().Count);
             r.Close();
@@ -5088,7 +4924,7 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual("test1", IndexReader.GetCommitUserData(dir)["label"]);
 
-            r = IndexReader.Open(dir);
+            r = IndexReader.Open(dir, true);
             Assert.AreEqual("test1", r.GetCommitUserData()["label"]);
             r.Close();
 
@@ -5139,7 +4975,7 @@ namespace Lucene.Net.Index
 
             System.Collections.IList thrown = new System.Collections.ArrayList();
 
-            IndexWriter writer = new AnonymousClassIndexWriter(thrown, this, new MockRAMDirectory(), new StandardAnalyzer());
+            IndexWriter writer = new AnonymousClassIndexWriter(thrown, this, new MockRAMDirectory(), new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 
             // need to set an info stream so message is called
             writer.SetInfoStream(new System.IO.StreamWriter(new System.IO.MemoryStream()));
@@ -5172,7 +5008,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
 
             // Token "" occurred once
@@ -5206,7 +5042,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
             Assert.AreEqual(2, termOffsets.Length);
             Assert.AreEqual(0, termOffsets[0].GetStartOffset());
@@ -5230,7 +5066,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
             Assert.AreEqual(2, termOffsets.Length);
             Assert.AreEqual(0, termOffsets[0].GetStartOffset());
@@ -5256,7 +5092,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
             Assert.AreEqual(2, termOffsets.Length);
             Assert.AreEqual(0, termOffsets[0].GetStartOffset());
@@ -5284,7 +5120,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
             Assert.AreEqual(2, termOffsets.Length);
             Assert.AreEqual(0, termOffsets[0].GetStartOffset());
@@ -5300,7 +5136,7 @@ namespace Lucene.Net.Index
         public virtual void TestEndOffsetPositionStopFilter()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter w = new IndexWriter(dir, new StopAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter w = new IndexWriter(dir, new StopAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             Document doc = new Document();
             Field f = new Field("field", "abcd the", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
             doc.Add(f);
@@ -5308,7 +5144,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
             Assert.AreEqual(2, termOffsets.Length);
             Assert.AreEqual(0, termOffsets[0].GetStartOffset());
@@ -5324,7 +5160,7 @@ namespace Lucene.Net.Index
         public virtual void TestEndOffsetPositionStandard()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             Document doc = new Document();
             Field f = new Field("field", "abcd the  ", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
             Field f2 = new Field("field", "crunch man", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -5333,7 +5169,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermPositionVector tpv = ((TermPositionVector)r.GetTermFreqVector(0, "field"));
             TermVectorOffsetInfo[] termOffsets = tpv.GetOffsets(0);
             Assert.AreEqual(1, termOffsets.Length);
@@ -5354,7 +5190,7 @@ namespace Lucene.Net.Index
         public virtual void TestEndOffsetPositionStandardEmptyField()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             Document doc = new Document();
             Field f = new Field("field", "", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
             Field f2 = new Field("field", "crunch man", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -5363,7 +5199,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermPositionVector tpv = ((TermPositionVector)r.GetTermFreqVector(0, "field"));
             TermVectorOffsetInfo[] termOffsets = tpv.GetOffsets(0);
             Assert.AreEqual(1, termOffsets.Length);
@@ -5381,7 +5217,7 @@ namespace Lucene.Net.Index
         public virtual void TestEndOffsetPositionStandardEmptyField2()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             Document doc = new Document();
 
             Field f = new Field("field", "abcd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -5394,7 +5230,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermPositionVector tpv = ((TermPositionVector)r.GetTermFreqVector(0, "field"));
             TermVectorOffsetInfo[] termOffsets = tpv.GetOffsets(0);
             Assert.AreEqual(1, termOffsets.Length);
@@ -5414,7 +5250,7 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestOtherFiles()
         {
-            System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "otherfiles"));
+            System.IO.DirectoryInfo indexDir = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "otherfiles"));
             Directory dir = FSDirectory.Open(indexDir);
             try
             {
@@ -5443,7 +5279,7 @@ namespace Lucene.Net.Index
         public virtual void TestDeadlock()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             writer.SetMaxBufferedDocs(2);
             Document doc = new Document();
             doc.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -5458,12 +5294,12 @@ namespace Lucene.Net.Index
             writer2.AddDocument(doc);
             writer2.Close();
 
-            IndexReader r1 = IndexReader.Open(dir2);
+            IndexReader r1 = IndexReader.Open(dir2, true);
             IndexReader r2 = (IndexReader)r1.Clone();
             writer.AddIndexes(new IndexReader[] { r1, r2 });
             writer.Close();
 
-            IndexReader r3 = IndexReader.Open(dir);
+            IndexReader r3 = IndexReader.Open(dir, true);
             Assert.AreEqual(5, r3.NumDocs());
             r3.Close();
 
@@ -5495,58 +5331,76 @@ namespace Lucene.Net.Index
             }
             internal volatile bool failed;
             internal volatile bool finish;
+
+            internal bool AllowInterrupt = false;
+
             override public void Run()
             {
                 bool endLoop = false;
                 RAMDirectory dir = new RAMDirectory();
                 IndexWriter w = null;
+                bool first = true;
                 while (!finish)
                 {
                     try
                     {
-                        //IndexWriter.unlock(dir);
-                        w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
-                        ((ConcurrentMergeScheduler)w.GetMergeScheduler()).SetSuppressExceptions();
-                        //w.setInfoStream(System.out);
-                        w.SetMaxBufferedDocs(2);
-                        w.SetMergeFactor(2);
-                        Document doc = new Document();
-                        doc.Add(new Field("field", "some text contents", Field.Store.YES, Field.Index.ANALYZED));
-                        for (int i = 0; i < 100; i++)
-                        {
-                            w.AddDocument(doc);
-                            w.Commit();
-                        }
-                    }
-                    catch (System.SystemException re)
-                    {
-                        System.Exception e = re.InnerException;
-                        if (re is System.Threading.ThreadInterruptedException || e is System.Threading.ThreadInterruptedException)
+                        while (true)
                         {
-                            // {{Aroush-2.9}} in Java, this is: java.lang.Thread.interrupted()
-                            //{There is no way in .Net to check interrupted state. So comment it out
+                            if (w != null)
+                            {
+                                w.Close();
+                            }
+                            w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+                            //((ConcurrentMergeScheduler) w.GetMergeScheduler()).SetSuppressExceptions();
+                            if (!first && !AllowInterrupt)
+                            {
+                                // tell main thread it can interrupt us at any time,
+                                // starting now
+                                AllowInterrupt = true;
+                            }
 
-                            //// Make sure IW restored interrupted bit
-                            //if ((Instance.ThreadState & (System.Threading.ThreadState.Stopped | System.Threading.ThreadState.Unstarted)) != System.Threading.ThreadState.Running)  // {{Aroush-2.9}} in Java, this is: java.lang.Thread.interrupted()
-                            //{
-                            //    System.Console.Out.WriteLine("FAILED; InterruptedException hit but thread.interrupted() was false");
-                            //    System.Console.Out.WriteLine(e.StackTrace);
-                            //    failed = true;
-                            //    break;
-                            //}
-                        }
-                        else
-                        {
-                            System.Console.Out.WriteLine("FAILED; unexpected exception");
-                            if (e != null)
+                            w.SetMaxBufferedDocs(2);
+                            w.SetMergeFactor(2);
+                            Document doc = new Document();
+                            doc.Add(new Field("field", "some text contents", Field.Store.YES, Field.Index.ANALYZED));
+                            for (int i = 0; i < 100; i++)
                             {
-                                System.Console.Out.WriteLine(e.StackTrace);
+                                w.AddDocument(doc);
+                                w.Commit();
                             }
-                            else
+                            w.Close();
+                            _TestUtil.CheckIndex(dir);
+                            IndexReader.Open(dir, true).Close();
+                            if (first && !AllowInterrupt)
                             {
-                                System.Console.Out.WriteLine(re.StackTrace);
+                                // Strangely, if we interrupt a thread before

[... 243 lines stripped ...]

