lucenenet-commits mailing list archives

From: ccurr...@apache.org
Subject: [Lucene.Net] svn commit: r1294875 [37/45] - in /incubator/lucene.net/trunk: ./ build/ build/vs2010/contrib/ build/vs2010/test/ doc/ src/ src/contrib/Analyzers/ src/contrib/Analyzers/AR/ src/contrib/Analyzers/BR/ src/contrib/Analyzers/CJK/ src/contrib/Analyzers/Cn/ ...
Date: Tue, 28 Feb 2012 22:43:28 GMT
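
The hunks below migrate TestIndexWriter.cs from the older API to the newer Lucene.Net surface: SupportClass.ThreadClass becomes ThreadClass, AddAttribute(typeof(T)) becomes the generic AddAttribute<T>(), Flush()/Abort() become Commit()/Rollback(), IndexWriter constructors now require a MaxFieldLength, and IndexReader.Open takes an explicit readOnly flag. The following is a minimal sketch (not part of the commit, class name PostPortApiSketch is illustrative) of that post-port usage, assuming the signatures as they appear in the diff:

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    class PostPortApiSketch
    {
        static void Main()
        {
            var dir = new RAMDirectory();

            // MaxFieldLength is now a required constructor argument.
            var writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                         IndexWriter.MaxFieldLength.UNLIMITED);

            var doc = new Document();
            doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);

            writer.Commit();      // replaces the removed Flush()
            // writer.Rollback(); // replaces the removed Abort()
            writer.Close();

            // IndexReader.Open now takes an explicit readOnly flag.
            var reader = IndexReader.Open(dir, true);
            System.Console.WriteLine(reader.NumDocs()); // prints 1
            reader.Close();
        }
    }
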
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriter.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using System.Threading;
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -53,25 +54,17 @@ using ScoreDoc = Lucene.Net.Search.Score
 using TermQuery = Lucene.Net.Search.TermQuery;
 using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
 using _TestUtil = Lucene.Net.Util._TestUtil;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Index
 {
-	
-	/// <summary> </summary>
-	/// <version>  $Id: TestIndexWriter.java 831036 2009-10-29 17:15:12Z mikemccand $
-	/// </version>
     [TestFixture]
     public class TestIndexWriter : BaseTokenStreamTestCase
     {
-        internal static System.Collections.Hashtable testWithNewApiData = new System.Collections.Hashtable();
-        static TestIndexWriter()
-        {
-            System.String[] data = new System.String[] { "TestExceptionFromTokenStream", "TestDocumentsWriterExceptions", "TestNegativePositions", "TestEndOffsetPositionWithCachingTokenFilter", "TestEndOffsetPositionWithTeeSinkTokenFilter", "TestEndOffsetPositionStandard", "TestEndOffsetPositionStandardEmptyField", "TestEndOffsetPositionStandardEmptyField2" };
-            for (int i = 0; i < data.Length; i++)
-            {
-                testWithNewApiData.Add(data[i], data[i]);
-            }
-        }
+        //public TestIndexWriter(System.String name)
+        //    : base(name)
+        //{
+        //}
 
         public class MyRAMDirectory : RAMDirectory
         {
@@ -153,7 +146,7 @@ namespace Lucene.Net.Index
 
             public override /*virtual*/ TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
             {
-                return new AnonymousClassTokenFilter(this, new StandardTokenizer(reader));
+                return new AnonymousClassTokenFilter(this, new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader));
             }
         }
         private class AnonymousClassAnalyzer1 : Analyzer
@@ -204,7 +197,7 @@ namespace Lucene.Net.Index
                 return new CrashingFilter(this.enclosingInstance, fieldName, new WhitespaceTokenizer(reader));
             }
         }
-        private class AnonymousClassThread : SupportClass.ThreadClass
+        private class AnonymousClassThread : ThreadClass
         {
             public AnonymousClassThread(int NUM_ITER, IndexWriter writer, int finalI, TestIndexWriter enclosingInstance)
             {
@@ -263,14 +256,14 @@ namespace Lucene.Net.Index
                 {
                     lock (this)
                     {
-                        System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": ERROR: hit unexpected exception");
+                        System.Console.Out.WriteLine(ThreadClass.Current().Name + ": ERROR: hit unexpected exception");
                         System.Console.Out.WriteLine(t.StackTrace);
                     }
                     Assert.Fail();
                 }
             }
         }
-        private class AnonymousClassThread1 : SupportClass.ThreadClass
+        private class AnonymousClassThread1 : ThreadClass
         {
             public AnonymousClassThread1(IndexWriter finalWriter, Document doc, System.Collections.ArrayList failure, TestIndexWriter enclosingInstance)
             {
@@ -361,8 +354,8 @@ namespace Lucene.Net.Index
             private void InitBlock(TestIndexWriter enclosingInstance)
             {
                 this.enclosingInstance = enclosingInstance;
-                this.termAtt = (TermAttribute)this.AddAttribute(typeof(TermAttribute));
-                this.posIncrAtt = (PositionIncrementAttribute)this.AddAttribute(typeof(PositionIncrementAttribute));
+                this.termAtt = this.AddAttribute<TermAttribute>();
+                this.posIncrAtt = this.AddAttribute<PositionIncrementAttribute>();
             }
             private TestIndexWriter enclosingInstance;
             public TestIndexWriter Enclosing_Instance
@@ -389,11 +382,16 @@ namespace Lucene.Net.Index
                 first = false;
                 return true;
             }
+
+            protected override void Dispose(bool disposing)
+            {
+                // Do nothing
+            }
         }
         private class AnonymousClassIndexWriter : IndexWriter
         {
             public AnonymousClassIndexWriter(System.Collections.IList thrown, TestIndexWriter enclosingInstance, MockRAMDirectory mockRAMDir, StandardAnalyzer standardAnalyzer)
-                : base(mockRAMDir, standardAnalyzer)
+                : base(mockRAMDir, standardAnalyzer, MaxFieldLength.UNLIMITED)
             {
                 InitBlock(thrown, enclosingInstance);
             }
@@ -421,15 +419,6 @@ namespace Lucene.Net.Index
                 }
             }
         }
-        public TestIndexWriter(System.String name)
-            : base(name, testWithNewApiData)
-        {
-        }
-
-        public TestIndexWriter()
-            : base("", testWithNewApiData)
-        {
-        }
 
         [Test]
         public virtual void TestDocCount()
@@ -452,11 +441,11 @@ namespace Lucene.Net.Index
             {
                 AddDoc(writer);
             }
-            Assert.AreEqual(100, writer.DocCount());
+            Assert.AreEqual(100, writer.MaxDoc());
             writer.Close();
 
             // delete 40 documents
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, false);
             for (i = 0; i < 40; i++)
             {
                 reader.DeleteDocument(i);
@@ -465,16 +454,16 @@ namespace Lucene.Net.Index
 
             // test doc count before segments are merged/index is optimized
             writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-            Assert.AreEqual(100, writer.DocCount());
+            Assert.AreEqual(100, writer.MaxDoc());
             writer.Close();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(100, reader.MaxDoc());
             Assert.AreEqual(60, reader.NumDocs());
             reader.Close();
 
             // optimize the index and check that the new doc count is correct
-            writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+            writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             Assert.AreEqual(100, writer.MaxDoc());
             Assert.AreEqual(60, writer.NumDocs());
             writer.Optimize();
@@ -483,7 +472,7 @@ namespace Lucene.Net.Index
             writer.Close();
 
             // check that the index reader gives the same numbers.
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(60, reader.MaxDoc());
             Assert.AreEqual(60, reader.NumDocs());
             reader.Close();
@@ -513,7 +502,7 @@ namespace Lucene.Net.Index
 
         /*
         Test: make sure when we run out of disk space or hit
-        random IOExceptions in any of the addIndexes(*) calls
+        random IOExceptions in any of the AddIndexesNoOptimize(*) calls
         that 1) index is not corrupt (searcher can open/search
         it) and 2) transactional semantics are followed:
         either all or none of the incoming documents were in
@@ -529,7 +518,7 @@ namespace Lucene.Net.Index
             bool debug = false;
 
             // Build up a bunch of dirs that have indexes which we
-            // will then merge together by calling addIndexes(*):
+            // will then merge together by calling AddIndexesNoOptimize(*):
             Directory[] dirs = new Directory[NUM_DIR];
             long inputDiskUsage = 0;
             for (int i = 0; i < NUM_DIR; i++)
@@ -549,7 +538,7 @@ namespace Lucene.Net.Index
             }
 
             // Now, build a starting index that has START_COUNT docs.  We
-            // will then try to addIndexes into a copy of this:
+            // will then try to AddIndexesNoOptimize into a copy of this:
             RAMDirectory startDir = new RAMDirectory();
             IndexWriter writer2 = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
             for (int j = 0; j < START_COUNT; j++)
@@ -560,7 +549,7 @@ namespace Lucene.Net.Index
 
             // Make sure starting index seems to be working properly:
             Term searchTerm = new Term("content", "aaa");
-            IndexReader reader = IndexReader.Open(startDir);
+            IndexReader reader = IndexReader.Open(startDir, true);
             Assert.AreEqual(57, reader.DocFreq(searchTerm), "first docFreq");
 
             IndexSearcher searcher = new IndexSearcher(reader);
@@ -571,12 +560,12 @@ namespace Lucene.Net.Index
 
             // Iterate with larger and larger amounts of free
             // disk space.  With little free disk space,
-            // addIndexes will certainly run out of space &
+            // AddIndexesNoOptimize will certainly run out of space &
             // fail.  Verify that when this happens, index is
             // not corrupt and index in fact has added no
             // documents.  Then, we increase disk space by 2000
             // bytes each iteration.  At some point there is
-            // enough free disk space and addIndexes should
+            // enough free disk space and AddIndexesNoOptimize should
             // succeed and index should show all documents were
             // added.
 
@@ -590,7 +579,7 @@ namespace Lucene.Net.Index
                 startDiskUsage += startDir.FileLength(files2[i]);
             }
 
-            for (int iter = 0; iter < 6; iter++)
+            for (int iter = 0; iter < 3; iter++)
             {
 
                 if (debug)
@@ -599,8 +588,7 @@ namespace Lucene.Net.Index
                 // Start with 100 bytes more than we are currently using:
                 long diskFree = diskUsage + 100;
 
-                bool autoCommit = iter % 2 == 0;
-                int method = iter / 2;
+                int method = iter;
 
                 bool success = false;
                 bool done = false;
@@ -608,7 +596,7 @@ namespace Lucene.Net.Index
                 System.String methodName;
                 if (0 == method)
                 {
-                    methodName = "addIndexes(Directory[])";
+                    methodName = "addIndexes(Directory[]) + optimize()";
                 }
                 else if (1 == method)
                 {
@@ -624,7 +612,7 @@ namespace Lucene.Net.Index
 
                     // Make a new dir that will enforce disk usage:
                     MockRAMDirectory dir = new MockRAMDirectory(startDir);
-                    writer2 = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+                    writer2 = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
                     System.IO.IOException err = null;
 
                     MergeScheduler ms = writer2.GetMergeScheduler();
@@ -665,14 +653,14 @@ namespace Lucene.Net.Index
                                 rate = 0.0;
                             }
                             if (debug)
-                                testName = "disk full test " + methodName + " with disk full at " + diskFree + " bytes autoCommit=" + autoCommit;
+                                testName = "disk full test " + methodName + " with disk full at " + diskFree + " bytes";
                         }
                         else
                         {
                             thisDiskFree = 0;
                             rate = 0.0;
                             if (debug)
-                                testName = "disk full test " + methodName + " with unlimited disk space autoCommit=" + autoCommit;
+                                testName = "disk full test " + methodName + " with unlimited disk space";
                         }
 
                         if (debug)
@@ -686,14 +674,15 @@ namespace Lucene.Net.Index
 
                             if (0 == method)
                             {
-                                writer2.AddIndexes(dirs);
+                                writer2.AddIndexesNoOptimize(dirs);
+                                writer2.Optimize();
                             }
                             else if (1 == method)
                             {
                                 IndexReader[] readers = new IndexReader[dirs.Length];
                                 for (int i = 0; i < dirs.Length; i++)
                                 {
-                                    readers[i] = IndexReader.Open(dirs[i]);
+                                    readers[i] = IndexReader.Open(dirs[i], true);
                                 }
                                 try
                                 {
@@ -744,33 +733,6 @@ namespace Lucene.Net.Index
                         // ConcurrentMergeScheduler are done
                         _TestUtil.SyncConcurrentMerges(writer2);
 
-                        if (autoCommit)
-                        {
-
-                            // Whether we succeeded or failed, check that
-                            // all un-referenced files were in fact
-                            // deleted (ie, we did not create garbage).
-                            // Only check this when autoCommit is true:
-                            // when it's false, it's expected that there
-                            // are unreferenced files (ie they won't be
-                            // referenced until the "commit on close").
-                            // Just create a new IndexFileDeleter, have it
-                            // delete unreferenced files, then verify that
-                            // in fact no files were deleted:
-
-                            System.String successStr;
-                            if (success)
-                            {
-                                successStr = "success";
-                            }
-                            else
-                            {
-                                successStr = "IOException";
-                            }
-                            System.String message = methodName + " failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes)";
-                            AssertNoUnreferencedFiles(dir, message);
-                        }
-
                         if (debug)
                         {
                             System.Console.Out.WriteLine("  now test readers");
@@ -782,7 +744,7 @@ namespace Lucene.Net.Index
                         // (transactional semantics):
                         try
                         {
-                            reader = IndexReader.Open(dir);
+                            reader = IndexReader.Open(dir, true);
                         }
                         catch (System.IO.IOException e)
                         {
@@ -792,13 +754,9 @@ namespace Lucene.Net.Index
                         int result = reader.DocFreq(searchTerm);
                         if (success)
                         {
-                            if (autoCommit && result != END_COUNT)
+                            if (result != START_COUNT)
                             {
-                                Assert.Fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
-                            }
-                            else if (!autoCommit && result != START_COUNT)
-                            {
-                                Assert.Fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " [autoCommit = false]");
+                                Assert.Fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT);
                             }
                         }
                         else
@@ -898,12 +856,11 @@ namespace Lucene.Net.Index
 
             bool debug = false;
 
-            for (int pass = 0; pass < 3; pass++)
+            for (int pass = 0; pass < 2; pass++)
             {
                 if (debug)
                     System.Console.Out.WriteLine("TEST: pass=" + pass);
-                bool autoCommit = pass == 0;
-                bool doAbort = pass == 2;
+                bool doAbort = pass == 1;
                 long diskFree = 200;
                 while (true)
                 {
@@ -911,7 +868,7 @@ namespace Lucene.Net.Index
                         System.Console.Out.WriteLine("TEST: cycle: diskFree=" + diskFree);
                     MockRAMDirectory dir = new MockRAMDirectory();
                     dir.SetMaxSizeInBytes(diskFree);
-                    IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 
                     MergeScheduler ms = writer.GetMergeScheduler();
                     if (ms is ConcurrentMergeScheduler)
@@ -942,7 +899,7 @@ namespace Lucene.Net.Index
                     {
                         if (doAbort)
                         {
-                            writer.Abort();
+                            writer.Rollback();
                         }
                         else
                         {
@@ -964,10 +921,10 @@ namespace Lucene.Net.Index
 
                         _TestUtil.SyncConcurrentMerges(ms);
 
-                        AssertNoUnreferencedFiles(dir, "after disk full during addDocument with autoCommit=" + autoCommit);
+                        AssertNoUnreferencedFiles(dir, "after disk full during addDocument");
 
                         // Make sure reader can open the index:
-                        IndexReader.Open(dir).Close();
+                        IndexReader.Open(dir, true).Close();
 
                         dir.Close();
 
@@ -995,7 +952,7 @@ namespace Lucene.Net.Index
             System.Array.Sort(startFiles);
             System.Array.Sort(endFiles);
 
-            if (!SupportClass.CollectionsHelper.Equals(startFiles, endFiles))
+            if (!CollectionsHelper.Equals(startFiles, endFiles))
             {
                 Assert.Fail(message + ": before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
             }
@@ -1006,10 +963,10 @@ namespace Lucene.Net.Index
         public virtual void TestWickedLongTerm()
         {
             RAMDirectory dir = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
             char[] chars = new char[DocumentsWriter.CHAR_BLOCK_SIZE_ForNUnit - 1];
-            SupportClass.CollectionsHelper.Fill(chars, 'x');
+            CollectionsHelper.Fill(chars, 'x');
             Document doc = new Document();
             System.String bigTerm = new System.String(chars);
 
@@ -1025,7 +982,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
 
             // Make sure all terms < max size were indexed
             Assert.AreEqual(2, reader.DocFreq(new Term("content", "abc")));
@@ -1050,12 +1007,12 @@ namespace Lucene.Net.Index
             // maximum length term, and search on that term:
             doc = new Document();
             doc.Add(new Field("content", bigTerm, Field.Store.NO, Field.Index.ANALYZED));
-            StandardAnalyzer sa = new StandardAnalyzer();
+            StandardAnalyzer sa = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
             sa.SetMaxTokenLength(100000);
             writer = new IndexWriter(dir, sa, IndexWriter.MaxFieldLength.LIMITED);
             writer.AddDocument(doc);
             writer.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(1, reader.DocFreq(new Term("content", bigTerm)));
             reader.Close();
 
@@ -1065,7 +1022,6 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestOptimizeMaxNumSegments()
         {
-
             MockRAMDirectory dir = new MockRAMDirectory();
 
             Document doc = new Document();
@@ -1207,7 +1163,7 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestCreateWithReader()
         {
-            System.IO.FileInfo indexDir = _TestUtil.GetTempDir("lucenetestindexwriter");
+            System.IO.DirectoryInfo indexDir = _TestUtil.GetTempDir("lucenetestindexwriter");
 
             try
             {
@@ -1219,53 +1175,17 @@ namespace Lucene.Net.Index
                 writer.Close();
 
                 // now open reader:
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
 
                 // now open index for create:
                 writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                Assert.AreEqual(writer.DocCount(), 0, "should be zero documents");
-                AddDoc(writer);
-                writer.Close();
-
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-                IndexReader reader2 = IndexReader.Open(dir);
-                Assert.AreEqual(reader2.NumDocs(), 1, "should be one document");
-                reader.Close();
-                reader2.Close();
-            }
-            finally
-            {
-                RmDir(indexDir);
-            }
-        }
-
-
-        // Same test as above, but use IndexWriter constructor
-        // that takes File:
-        [Test]
-        public virtual void TestCreateWithReader2()
-        {
-            System.IO.FileInfo indexDir = _TestUtil.GetTempDir("lucenetestindexwriter");
-            try
-            {
-                // add one document & close writer
-                IndexWriter writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                AddDoc(writer);
-                writer.Close();
-
-                // now open reader:
-                IndexReader reader = IndexReader.Open(indexDir);
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-
-                // now open index for create:
-                writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                Assert.AreEqual(writer.DocCount(), 0, "should be zero documents");
+                Assert.AreEqual(writer.MaxDoc(), 0, "should be zero documents");
                 AddDoc(writer);
                 writer.Close();
 
                 Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-                IndexReader reader2 = IndexReader.Open(indexDir);
+                IndexReader reader2 = IndexReader.Open(dir, true);
                 Assert.AreEqual(reader2.NumDocs(), 1, "should be one document");
                 reader.Close();
                 reader2.Close();
@@ -1276,42 +1196,6 @@ namespace Lucene.Net.Index
             }
         }
 
-        // Same test as above, but use IndexWriter constructor
-        // that takes String:
-        [Test]
-        public virtual void TestCreateWithReader3()
-        {
-            System.IO.FileInfo dirName = _TestUtil.GetTempDir("lucenetestindexwriter");
-            try
-            {
-
-                // add one document & close writer
-                IndexWriter writer = new IndexWriter(dirName, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                AddDoc(writer);
-                writer.Close();
-
-                // now open reader:
-                IndexReader reader = IndexReader.Open(dirName);
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-
-                // now open index for create:
-                writer = new IndexWriter(dirName, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                Assert.AreEqual(writer.DocCount(), 0, "should be zero documents");
-                AddDoc(writer);
-                writer.Close();
-
-                Assert.AreEqual(reader.NumDocs(), 1, "should be one document");
-                IndexReader reader2 = IndexReader.Open(dirName);
-                Assert.AreEqual(reader2.NumDocs(), 1, "should be one document");
-                reader.Close();
-                reader2.Close();
-            }
-            finally
-            {
-                RmDir(dirName);
-            }
-        }
-
         // Simulate a writer that crashed while writing segments
         // file: make sure we can still open the index (ie,
         // gracefully fallback to the previous segments file),
@@ -1355,7 +1239,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
             }
             catch (System.Exception e)
             {
@@ -1423,7 +1307,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 Assert.Fail("reader did not hit IOException on opening a corrupt index");
             }
             catch (System.Exception e)
@@ -1496,7 +1380,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 Assert.Fail("reader did not hit IOException on opening a corrupt index");
             }
             catch (System.Exception e)
@@ -1509,11 +1393,10 @@ namespace Lucene.Net.Index
         }
 
         /*
-        * Simple test for "commit on close": open writer with
-        * autoCommit=false, so it will only commit on close,
-        * then add a bunch of docs, making sure reader does not
-        * see these docs until writer is closed.
-        */
+         * Simple test for "commit on close": open writer then 
+         * add a bunch of docs, making sure reader does not
+         * see these docs until writer is closed.
+         */
         [Test]
         public virtual void TestCommitOnClose()
         {
@@ -1526,12 +1409,12 @@ namespace Lucene.Net.Index
             writer.Close();
 
             Term searchTerm = new Term("content", "aaa");
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(14, hits.Length, "first number of hits");
             searcher.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
 
             writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
             for (int i = 0; i < 3; i++)
@@ -1540,9 +1423,9 @@ namespace Lucene.Net.Index
                 {
                     AddDoc(writer);
                 }
-                searcher = new IndexSearcher(dir);
+                searcher = new IndexSearcher(dir, true);
                 hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer with autoCommit disabled");
+                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
                 searcher.Close();
                 Assert.IsTrue(reader.IsCurrent(), "reader should have still been current");
             }
@@ -1551,16 +1434,15 @@ namespace Lucene.Net.Index
             writer.Close();
             Assert.IsFalse(reader.IsCurrent(), "reader should not be current now");
 
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(47, hits.Length, "reader did not see changes after writer was closed");
             searcher.Close();
         }
 
         /*
-        * Simple test for "commit on close": open writer with
-        * autoCommit=false, so it will only commit on close,
-        * then add a bunch of docs, making sure reader does not
+        * Simple test for "commit on close": open writer then 
+        * add a bunch of docs, making sure reader does not
         * see them until writer has closed.  Then instead of
         * closing the writer, call abort and verify reader sees
         * nothing was added.  Then verify we can open the index
@@ -1579,7 +1461,7 @@ namespace Lucene.Net.Index
             writer.Close();
 
             Term searchTerm = new Term("content", "aaa");
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(14, hits.Length, "first number of hits");
             searcher.Close();
@@ -1593,17 +1475,17 @@ namespace Lucene.Net.Index
             // Delete all docs:
             writer.DeleteDocuments(searchTerm);
 
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-            Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer with autoCommit disabled");
+            Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
             searcher.Close();
 
             // Now, close the writer:
-            writer.Abort();
+            writer.Rollback();
 
-            AssertNoUnreferencedFiles(dir, "unreferenced files remain after abort()");
+            AssertNoUnreferencedFiles(dir, "unreferenced files remain after Rollback()");
 
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(14, hits.Length, "saw changes after writer.abort");
             searcher.Close();
@@ -1623,14 +1505,14 @@ namespace Lucene.Net.Index
                 {
                     AddDoc(writer);
                 }
-                searcher = new IndexSearcher(dir);
+                searcher = new IndexSearcher(dir, false);
                 hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer with autoCommit disabled");
+                Assert.AreEqual(14, hits.Length, "reader incorrectly sees changes from writer");
                 searcher.Close();
             }
 
             writer.Close();
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(218, hits.Length, "didn't see changes after close");
             searcher.Close();
@@ -1670,7 +1552,7 @@ namespace Lucene.Net.Index
             writer.Optimize();
             writer.Close();
 
-            IndexReader.Open(dir).Close();
+            IndexReader.Open(dir, true).Close();
 
             long endDiskUsage = dir.GetMaxUsedSizeInBytes();
 
@@ -1680,14 +1562,14 @@ namespace Lucene.Net.Index
             // and it doesn't delete intermediate segments then it
             // will exceed this 105X:
             // System.out.println("start " + startDiskUsage + "; mid " + midDiskUsage + ";end " + endDiskUsage);
-            Assert.IsTrue(midDiskUsage < 150 * startDiskUsage, "writer used too much space while adding documents when autoCommit=false: mid=" + midDiskUsage + " start=" + startDiskUsage + " end=" + endDiskUsage);
-            Assert.IsTrue(endDiskUsage < 150 * startDiskUsage, "writer used too much space after close when autoCommit=false endDiskUsage=" + endDiskUsage + " startDiskUsage=" + startDiskUsage);
+            Assert.IsTrue(midDiskUsage < 150 * startDiskUsage, "writer used too much space while adding documents: mid=" + midDiskUsage + " start=" + startDiskUsage + " end=" + endDiskUsage);
+            Assert.IsTrue(endDiskUsage < 150 * startDiskUsage, "writer used too much space after close: endDiskUsage=" + endDiskUsage + " startDiskUsage=" + startDiskUsage);
         }
 
 
         /*
         * Verify that calling optimize when writer is open for
-        * "commit on close" works correctly both for abort()
+        * "commit on close" works correctly both for Rollback()
         * and close().
         */
         [Test]
@@ -1706,7 +1588,7 @@ namespace Lucene.Net.Index
             writer.Optimize();
 
             // Open a reader before closing (commiting) the writer:
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
 
             // Reader should see index as unoptimized at this
             // point:
@@ -1714,11 +1596,11 @@ namespace Lucene.Net.Index
             reader.Close();
 
             // Abort the writer:
-            writer.Abort();
+            writer.Rollback();
             AssertNoUnreferencedFiles(dir, "aborted writer after optimize");
 
             // Open a reader after aborting writer:
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
 
             // Reader should still see index as unoptimized:
             Assert.IsFalse(reader.IsOptimized(), "Reader incorrectly sees that the index is optimized");
@@ -1730,7 +1612,7 @@ namespace Lucene.Net.Index
             AssertNoUnreferencedFiles(dir, "aborted writer after optimize");
 
             // Open a reader after aborting writer:
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
 
             // Reader should still see index as unoptimized:
             Assert.IsTrue(reader.IsOptimized(), "Reader incorrectly sees that the index is unoptimized");
@@ -1742,19 +1624,19 @@ namespace Lucene.Net.Index
         {
             RAMDirectory dir = new RAMDirectory();
             IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-            writer.Flush();
+            writer.Commit();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.MaxDoc());
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
 
             writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
-            writer.Flush();
+            writer.Commit();
             writer.Close();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.MaxDoc());
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
@@ -1779,7 +1661,7 @@ namespace Lucene.Net.Index
             }
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(100, reader.MaxDoc());
             Assert.AreEqual(100, reader.NumDocs());
             for (int j = 0; j < 100; j++)
@@ -2024,7 +1906,7 @@ namespace Lucene.Net.Index
             }
             writer.Close();
 
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("field", "aaa")), null, 1000).ScoreDocs;
             Assert.AreEqual(300, hits.Length);
             searcher.Close();
@@ -2054,7 +1936,7 @@ namespace Lucene.Net.Index
 
             Term searchTerm = new Term("field", "aaa");
 
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(10, hits.Length);
             searcher.Close();
@@ -2074,12 +1956,12 @@ namespace Lucene.Net.Index
                 writer.AddDocument(doc);
             }
             writer.Close();
-            searcher = new IndexSearcher(dir);
+            searcher = new IndexSearcher(dir, false);
             hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(27, hits.Length);
             searcher.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             reader.Close();
 
             dir.Close();
@@ -2105,7 +1987,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(1, reader.MaxDoc());
             Assert.AreEqual(1, reader.NumDocs());
             Term t = new Term("field", "a");
@@ -2135,7 +2017,7 @@ namespace Lucene.Net.Index
             }
             writer.Close();
             Term searchTerm = new Term("content", "aaa");
-            IndexSearcher searcher = new IndexSearcher(dir);
+            IndexSearcher searcher = new IndexSearcher(dir, false);
             ScoreDoc[] hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
             Assert.AreEqual(100, hits.Length, "did not get right number of hits");
             writer.Close();
@@ -2175,11 +2057,11 @@ namespace Lucene.Net.Index
             Document doc = new Document();
             doc.Add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
             writer.AddDocument(doc);
-            writer.Flush();
+            writer.Commit();
             writer.AddDocument(new Document());
             writer.Close();
             _TestUtil.CheckIndex(dir);
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(2, reader.NumDocs());
         }
 
@@ -2206,7 +2088,7 @@ namespace Lucene.Net.Index
                 if (0 == pass)
                 {
                     writer.Close();
-                    IndexReader reader = IndexReader.Open(dir);
+                    IndexReader reader = IndexReader.Open(dir, false);
                     Assert.IsTrue(reader.IsOptimized());
                     reader.Close();
                 }
@@ -2218,7 +2100,7 @@ namespace Lucene.Net.Index
                     writer.AddDocument(doc);
                     writer.Close();
 
-                    IndexReader reader = IndexReader.Open(dir);
+                    IndexReader reader = IndexReader.Open(dir, true);
                     Assert.IsTrue(!reader.IsOptimized());
                     reader.Close();
 
@@ -2231,9 +2113,10 @@ namespace Lucene.Net.Index
             dir.Close();
         }
 
-        private void RmDir(System.IO.FileInfo dir)
+        // TODO: Could be less complicated.
+        private void RmDir(System.IO.DirectoryInfo dir)
         {
-            System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(dir);
+            System.IO.FileInfo[] files = FileSupport.GetFiles(dir);
             if (files != null)
             {
                 for (int i = 0; i < files.Length; i++)
@@ -2281,7 +2164,7 @@ namespace Lucene.Net.Index
         public virtual void TestBadSegment()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
             Document document = new Document();
             document.Add(new Field("tvtest", "", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
@@ -2295,7 +2178,7 @@ namespace Lucene.Net.Index
         public virtual void TestNoTermVectorAfterTermVector()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
             Document document = new Document();
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
@@ -2303,12 +2186,12 @@ namespace Lucene.Net.Index
             document.Add(new Field("tvtest", "x y z", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
             iw.AddDocument(document);
             // Make first segment
-            iw.Flush();
+            iw.Commit();
 
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
             // Make 2nd segment
-            iw.Flush();
+            iw.Commit();
 
             iw.Optimize();
             iw.Close();
@@ -2320,24 +2203,24 @@ namespace Lucene.Net.Index
         public virtual void TestNoTermVectorAfterTermVectorMerge()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
             Document document = new Document();
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
-            iw.Flush();
+            iw.Commit();
 
             document = new Document();
             document.Add(new Field("tvtest", "x y z", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
             iw.AddDocument(document);
             // Make first segment
-            iw.Flush();
+            iw.Commit();
 
             iw.Optimize();
 
             document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             iw.AddDocument(document);
             // Make 2nd segment
-            iw.Flush();
+            iw.Commit();
             iw.Optimize();
 
             iw.Close();
@@ -2348,23 +2231,23 @@ namespace Lucene.Net.Index
         [Test]
         public virtual void TestMaxThreadPriority()
         {
-            int pri = (System.Int32)SupportClass.ThreadClass.Current().Priority;
+            int pri = (System.Int32)ThreadClass.Current().Priority;
             try
             {
                 MockRAMDirectory dir = new MockRAMDirectory();
-                IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+                IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
                 Document document = new Document();
                 document.Add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
                 iw.SetMaxBufferedDocs(2);
                 iw.SetMergeFactor(2);
-                SupportClass.ThreadClass.Current().Priority = (System.Threading.ThreadPriority)System.Threading.ThreadPriority.Highest;
+                ThreadClass.Current().Priority = (System.Threading.ThreadPriority)System.Threading.ThreadPriority.Highest;
                 for (int i = 0; i < 4; i++)
                     iw.AddDocument(document);
                 iw.Close();
             }
             finally
             {
-                SupportClass.ThreadClass.Current().Priority = (System.Threading.ThreadPriority)pri;
+                ThreadClass.Current().Priority = (System.Threading.ThreadPriority)pri;
             }
         }
 
@@ -2405,10 +2288,11 @@ namespace Lucene.Net.Index
                     }
                 }
             }
-
-            public override /*virtual*/ void Close()
+            protected override void Dispose(bool disposing)
             {
+                // Do nothing
             }
+            
         }
 
         // LUCENE-1013
@@ -2416,7 +2300,7 @@ namespace Lucene.Net.Index
         public virtual void TestSetMaxMergeDocs()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
             iw.SetMergeScheduler(new MyMergeScheduler(this));
             iw.SetMaxMergeDocs(20);
             iw.SetMaxBufferedDocs(2);
@@ -2458,7 +2342,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
 
             writer.Close();
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Term t = new Term("content", "aa");
             Assert.AreEqual(reader.DocFreq(t), 3);
 
@@ -2548,7 +2432,7 @@ namespace Lucene.Net.Index
             }
             Assert.IsTrue(hitError);
             writer.Close();
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(198, reader.DocFreq(new Term("content", "aa")));
             reader.Close();
         }
@@ -2626,7 +2510,7 @@ namespace Lucene.Net.Index
                 }
                 writer.Close();
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 int expected = 3 + (1 - i) * 2;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2654,7 +2538,7 @@ namespace Lucene.Net.Index
                 writer.Optimize();
                 writer.Close();
 
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 expected = 19 + (1 - i) * 2;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2693,7 +2577,7 @@ namespace Lucene.Net.Index
 
                     int finalI = i;
 
-                    SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[NUM_THREAD];
+                    ThreadClass[] threads = new ThreadClass[NUM_THREAD];
                     for (int t = 0; t < NUM_THREAD; t++)
                     {
                         threads[t] = new AnonymousClassThread(NUM_ITER, writer, finalI, this);
@@ -2706,7 +2590,7 @@ namespace Lucene.Net.Index
                     writer.Close();
                 }
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 int expected = (3 + (1 - i) * 2) * NUM_THREAD * NUM_ITER;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2734,7 +2618,7 @@ namespace Lucene.Net.Index
                 writer2.Optimize();
                 writer2.Close();
 
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
                 expected += 17 - NUM_THREAD * NUM_ITER;
                 Assert.AreEqual(expected, reader.DocFreq(new Term("contents", "here")));
                 Assert.AreEqual(expected, reader.MaxDoc());
@@ -2794,7 +2678,7 @@ namespace Lucene.Net.Index
                     writer.AddDocument(doc);
 
                 writer.Close();
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, false);
                 reader.DeleteDocument(delID++);
                 reader.Close();
 
@@ -2817,17 +2701,16 @@ namespace Lucene.Net.Index
             Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
             doc.Add(idField);
 
-            for (int pass = 0; pass < 3; pass++)
+            for (int pass = 0; pass < 2; pass++)
             {
-                bool autoCommit = pass % 2 == 0;
-                IndexWriter writer = new IndexWriter(directory, autoCommit, new WhitespaceAnalyzer(), true);
+                IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 
-                //System.out.println("TEST: pass=" + pass + " ac=" + autoCommit + " cms=" + (pass >= 2));
+                //System.out.println("TEST: pass=" + pass + " cms=" + (pass >= 2));
                 for (int iter = 0; iter < 10; iter++)
                 {
                     //System.out.println("TEST: iter=" + iter);
                     MergeScheduler ms;
-                    if (pass >= 2)
+                    if (pass == 1)
                         ms = new ConcurrentMergeScheduler();
                     else
                         ms = new SerialMergeScheduler();
@@ -2855,7 +2738,7 @@ namespace Lucene.Net.Index
 
                     IndexWriter finalWriter = writer;
                     System.Collections.ArrayList failure = new System.Collections.ArrayList();
-                    SupportClass.ThreadClass t1 = new AnonymousClassThread1(finalWriter, doc, failure, this);
+                    ThreadClass t1 = new AnonymousClassThread1(finalWriter, doc, failure, this);
 
                     if (failure.Count > 0)
                     {
@@ -2868,11 +2751,11 @@ namespace Lucene.Net.Index
                     t1.Join();
 
                     // Make sure reader can read
-                    IndexReader reader = IndexReader.Open(directory);
+                    IndexReader reader = IndexReader.Open(directory, true);
                     reader.Close();
 
                     // Reopen
-                    writer = new IndexWriter(directory, autoCommit, new WhitespaceAnalyzer(), false);
+                    writer = new IndexWriter(directory, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
                 }
                 writer.Close();
             }
@@ -2881,7 +2764,7 @@ namespace Lucene.Net.Index
         }
 
         // Used by test cases below
-        private class IndexerThread : SupportClass.ThreadClass
+        private class IndexerThread : ThreadClass
         {
             private void InitBlock(TestIndexWriter enclosingInstance)
             {
@@ -2935,15 +2818,14 @@ namespace Lucene.Net.Index
                         if (ioe.Message.StartsWith("fake disk full at") || ioe.Message.Equals("now failing on purpose"))
                         {
                             diskFull = true;
-                            try
-                            {
+                            //try
+                            //{
                                 System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64)10000 * 1));
-                            }
-                            catch (System.Threading.ThreadInterruptedException ie)
-                            {
-                                SupportClass.ThreadClass.Current().Interrupt();
-                                throw new System.SystemException("", ie);
-                            }
+                            //}
+                            //catch (System.Threading.ThreadInterruptedException ie)
+                            //{
+                            //    throw;
+                            //}
                             if (fullCount++ >= 5)
                                 break;
                         }
@@ -2951,7 +2833,7 @@ namespace Lucene.Net.Index
                         {
                             if (noErrors)
                             {
-                                System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": ERROR: unexpected IOException:");
+                                System.Console.Out.WriteLine(ThreadClass.Current().Name + ": ERROR: unexpected IOException:");
                                 System.Console.Out.WriteLine(ioe.StackTrace);
                                 error = ioe;
                             }
@@ -2963,7 +2845,7 @@ namespace Lucene.Net.Index
                         //t.printStackTrace(System.out);
                         if (noErrors)
                         {
-                            System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": ERROR: unexpected Throwable:");
+                            System.Console.Out.WriteLine(ThreadClass.Current().Name + ": ERROR: unexpected Throwable:");
                             System.Console.Out.WriteLine(t.StackTrace);
                             error = t;
                         }
@@ -3029,7 +2911,7 @@ namespace Lucene.Net.Index
                 }
 
                 // Quick test to make sure index is not corrupt:
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 TermDocs tdocs = reader.TermDocs(new Term("field", "aaa"));
                 int count = 0;
                 while (tdocs.Next())
@@ -3094,7 +2976,7 @@ namespace Lucene.Net.Index
             for (int iter = 0; iter < 10; iter++)
             {
                 MockRAMDirectory dir = new MockRAMDirectory();
-                IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
                 ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
                 // We expect disk full exceptions in the merge threads
                 cms.SetSuppressExceptions();
@@ -3166,7 +3048,7 @@ namespace Lucene.Net.Index
         {
             MockRAMDirectory dir = new MockRAMDirectory();
 
-            IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             writer.SetMaxBufferedDocs(2);
             Document doc = new Document();
             doc.Add(new Field("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -3180,6 +3062,7 @@ namespace Lucene.Net.Index
             {
                 writer.AddDocument(doc);
                 writer.AddDocument(doc);
+                writer.Commit();
                 Assert.Fail("did not hit exception");
             }
             catch (System.IO.IOException ioe)
@@ -3241,7 +3124,7 @@ namespace Lucene.Net.Index
 
                 if (success)
                 {
-                    IndexReader reader = IndexReader.Open(dir);
+                    IndexReader reader = IndexReader.Open(dir, true);
                     for (int j = 0; j < reader.MaxDoc(); j++)
                     {
                         if (!reader.IsDeleted(j))
@@ -3258,7 +3141,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), is OK:
+        // IOException during Rollback(), is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbort()
         {
@@ -3266,7 +3149,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), is OK:
+        // IOException during Rollback(), is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbortOnlyOnce()
         {
@@ -3274,7 +3157,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), with multiple threads, is OK:
+        // IOException during Rollback(), with multiple threads, is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbortWithThreads()
         {
@@ -3282,7 +3165,7 @@ namespace Lucene.Net.Index
         }
 
         // LUCENE-1130: make sure initial IOException, and then 2nd
-        // IOException during abort(), with multiple threads, is OK:
+        // IOException during Rollback(), with multiple threads, is OK:
         [Test]
         public virtual void TestIOExceptionDuringAbortWithThreadsOnlyOnce()
         {
@@ -3417,7 +3300,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Term t = new Term("field", "x");
             Assert.AreEqual(1, reader.DocFreq(t));
             reader.Close();
@@ -3458,7 +3341,7 @@ namespace Lucene.Net.Index
             IndexReader reader = null;
             try
             {
-                reader = IndexReader.Open(dir);
+                reader = IndexReader.Open(dir, true);
             }
             catch (System.IO.IOException e)
             {
@@ -3481,7 +3364,7 @@ namespace Lucene.Net.Index
             for (int i = 0; i < 23; i++)
                 AddDoc(writer);
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             writer.Commit();
             IndexReader reader2 = reader.Reopen();
@@ -3493,12 +3376,12 @@ namespace Lucene.Net.Index
                 AddDoc(writer);
             Assert.AreEqual(23, reader2.NumDocs());
             reader2.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
             writer.Commit();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(40, reader.NumDocs());
             reader.Close();
             writer.Close();
@@ -3536,25 +3419,36 @@ namespace Lucene.Net.Index
             FailOnlyInSync failure = new FailOnlyInSync();
             dir.FailOn(failure);
 
-            IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer());
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             failure.SetDoFail();
 
             ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
-            // We expect sync exceptions in the merge threads
-            cms.SetSuppressExceptions();
             writer.SetMergeScheduler(cms);
             writer.SetMaxBufferedDocs(2);
             writer.SetMergeFactor(5);
 
             for (int i = 0; i < 23; i++)
+            {
                 AddDoc(writer);
+                if ((i - 1) % 2 == 0)
+                {
+                    try
+                    {
+                        writer.Commit();
+                    }
+                    catch (System.IO.IOException ioe)
+                    {
+                        //expected
+                    }
+                }
+            }
 
             cms.Sync();
             Assert.IsTrue(failure.didFail);
             failure.ClearDoFail();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
             dir.Close();
@@ -3569,7 +3463,8 @@ namespace Lucene.Net.Index
             for (int iter = 0; iter < 4; iter++)
             {
                 bool autoCommit = 1 == iter / 2;
-                IndexWriter writer = new IndexWriter(dir, autoCommit, new StandardAnalyzer());
+                IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT),
+                                                     IndexWriter.MaxFieldLength.UNLIMITED);
                 writer.SetMaxBufferedDocs(2);
                 writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
                 writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3591,7 +3486,7 @@ namespace Lucene.Net.Index
                 writer.Optimize();
                 writer.Close();
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 for (int i = 0; i < reader.NumDocs(); i++)
                 {
                     reader.Document(i);
@@ -3599,14 +3494,15 @@ namespace Lucene.Net.Index
                 }
                 reader.Close();
 
-                writer = new IndexWriter(dir, autoCommit, new StandardAnalyzer());
+                writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
                 writer.SetMaxBufferedDocs(2);
                 writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
                 writer.SetMergeScheduler(new SerialMergeScheduler());
                 writer.SetMergePolicy(new LogDocMergePolicy(writer));
 
                 Directory[] indexDirs = new Directory[] { new MockRAMDirectory(dir) };
-                writer.AddIndexes(indexDirs);
+                writer.AddIndexesNoOptimize(indexDirs);
+                writer.Optimize();
                 writer.Close();
             }
             dir.Close();
@@ -3617,10 +3513,9 @@ namespace Lucene.Net.Index
         public virtual void TestTermVectorCorruption2()
         {
             Directory dir = new MockRAMDirectory();
-            for (int iter = 0; iter < 4; iter++)
+            for (int iter = 0; iter < 2; iter++)
             {
-                bool autoCommit = 1 == iter / 2;
-                IndexWriter writer = new IndexWriter(dir, autoCommit, new StandardAnalyzer());
+                IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
                 writer.SetMaxBufferedDocs(2);
                 writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
                 writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3641,7 +3536,7 @@ namespace Lucene.Net.Index
                 writer.Optimize();
                 writer.Close();
 
-                IndexReader reader = IndexReader.Open(dir);
+                IndexReader reader = IndexReader.Open(dir, true);
                 Assert.IsTrue(reader.GetTermFreqVectors(0) == null);
                 Assert.IsTrue(reader.GetTermFreqVectors(1) == null);
                 Assert.IsTrue(reader.GetTermFreqVectors(2) != null);
@@ -3655,7 +3550,7 @@ namespace Lucene.Net.Index
         public virtual void TestTermVectorCorruption3()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
             writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3672,7 +3567,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
             writer.SetMergeScheduler(new SerialMergeScheduler());
@@ -3683,7 +3578,7 @@ namespace Lucene.Net.Index
             writer.Optimize();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             for (int i = 0; i < 10; i++)
             {
                 reader.GetTermFreqVectors(i);
@@ -3710,7 +3605,7 @@ namespace Lucene.Net.Index
             writer.AddDocument(doc);
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Term t = new Term("field", "x");
             Assert.AreEqual(1, reader.DocFreq(t));
             reader.Close();
@@ -3723,7 +3618,7 @@ namespace Lucene.Net.Index
         public virtual void TestExpungeDeletes()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
 
@@ -3738,7 +3633,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, false);
             Assert.AreEqual(10, ir.MaxDoc());
             Assert.AreEqual(10, ir.NumDocs());
             ir.DeleteDocument(0);
@@ -3746,13 +3641,13 @@ namespace Lucene.Net.Index
             Assert.AreEqual(8, ir.NumDocs());
             ir.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             Assert.AreEqual(8, writer.NumDocs());
             Assert.AreEqual(10, writer.MaxDoc());
             writer.ExpungeDeletes();
             Assert.AreEqual(8, writer.NumDocs());
             writer.Close();
-            ir = IndexReader.Open(dir);
+            ir = IndexReader.Open(dir, true);
             Assert.AreEqual(8, ir.MaxDoc());
             Assert.AreEqual(8, ir.NumDocs());
             ir.Close();
@@ -3764,7 +3659,7 @@ namespace Lucene.Net.Index
         public virtual void TestExpungeDeletes2()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetMergeFactor(50);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -3780,7 +3675,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, false);
             Assert.AreEqual(98, ir.MaxDoc());
             Assert.AreEqual(98, ir.NumDocs());
             for (int i = 0; i < 98; i += 2)
@@ -3788,12 +3683,12 @@ namespace Lucene.Net.Index
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMergeFactor(3);
             Assert.AreEqual(49, writer.NumDocs());
             writer.ExpungeDeletes();
             writer.Close();
-            ir = IndexReader.Open(dir);
+            ir = IndexReader.Open(dir, true);
             Assert.AreEqual(49, ir.MaxDoc());
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
@@ -3806,7 +3701,7 @@ namespace Lucene.Net.Index
         public virtual void TestExpungeDeletes3()
         {
             Directory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             writer.SetMaxBufferedDocs(2);
             writer.SetMergeFactor(50);
             writer.SetRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -3822,7 +3717,7 @@ namespace Lucene.Net.Index
                 writer.AddDocument(document);
             writer.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, false);
             Assert.AreEqual(98, ir.MaxDoc());
             Assert.AreEqual(98, ir.NumDocs());
             for (int i = 0; i < 98; i += 2)
@@ -3830,12 +3725,12 @@ namespace Lucene.Net.Index
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
 
-            writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
             // Force many merges to happen
             writer.SetMergeFactor(3);
             writer.ExpungeDeletes(false);
             writer.Close();
-            ir = IndexReader.Open(dir);
+            ir = IndexReader.Open(dir, true);
             Assert.AreEqual(49, ir.MaxDoc());
             Assert.AreEqual(49, ir.NumDocs());
             ir.Close();
@@ -3847,7 +3742,7 @@ namespace Lucene.Net.Index
         public virtual void TestEmptyFieldName()
         {
             MockRAMDirectory dir = new MockRAMDirectory();
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
             Document doc = new Document();
             doc.Add(new Field("", "a b c", Field.Store.NO, Field.Index.ANALYZED));
             writer.AddDocument(doc);
@@ -3900,7 +3795,7 @@ namespace Lucene.Net.Index
             {
                 w.AddDocument(doc);
 
-                if (SupportClass.BuildType.Debug)
+                if (BuildType.Debug)
                     Assert.Fail("did not hit exception");
                 else
                     Assert.Ignore("This test is not executed in release mode");
@@ -4002,7 +3897,7 @@ namespace Lucene.Net.Index
                 }
 
             ((ConcurrentMergeScheduler)w.GetMergeScheduler()).Sync();
-            if (SupportClass.BuildType.Debug)
+            if (BuildType.Debug)
                 Assert.IsTrue(w.failed);
             else
                 Assert.Ignore("This test is not executed in release mode");
@@ -4066,7 +3961,7 @@ namespace Lucene.Net.Index
             Assert.IsTrue(w.afterWasCalled);
             w.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, true);
             Assert.AreEqual(1, ir.MaxDoc());
             Assert.AreEqual(0, ir.NumDocs());
             ir.Close();
@@ -4088,10 +3983,16 @@ namespace Lucene.Net.Index
                 {
                     System.Diagnostics.StackFrame sf = trace.GetFrame(i);
                     string className = sf.GetMethod().DeclaringType.Namespace + "." + sf.GetMethod().DeclaringType.Name;
-                    if ("Lucene.Net.Index.SegmentInfos".Equals(className) && "PrepareCommit".Equals(sf.GetMethod().Name))
+                    // NOTE: Added "Write" to the method name comparisons for TestExceptionDuringCommit to pass.
+                    // In release mode, the JITer inlines PrepareCommit, and it wasn't getting caught by this check.
+                    // Write seems to get the expected behavior, though. The other alternative would be to disable
+                    // inlining on that function, but that would be for testing only and would hurt release performance. - cc
+                    if ("Lucene.Net.Index.SegmentInfos".Equals(className) && ("Write".Equals(sf.GetMethod().Name) || "PrepareCommit".Equals(sf.GetMethod().Name)))
                         isCommit = true;
                     if ("Lucene.Net.Store.MockRAMDirectory".Equals(className) && "DeleteFile".Equals(sf.GetMethod().Name))
                         isDelete = true;
+                    if ("Lucene.Net.Index.SegmentInfos".Equals(className))
+                        Console.WriteLine(sf.GetMethod().Name);
                 }
 
                 if (isCommit)
@@ -4110,11 +4011,7 @@ namespace Lucene.Net.Index
             }
         }
 
-        // LUCENE-1214
-#if GALLIO
-        [Ignore]
-        // TODO: figure out why this fails with nunit & gallio in release mode
-#endif 
+        // LUCENE-1214 
         [Test]
         public virtual void TestExceptionsDuringCommit()
         {
@@ -4139,7 +4036,7 @@ namespace Lucene.Net.Index
                 // Expected
             }
             Assert.IsTrue(failure.fail1 && failure.fail2);
-            w.Abort();
+            w.Rollback();
             dir.Close();
         }
 
@@ -4159,7 +4056,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, true);
             Document doc2 = ir.Document(0);
             for (int i = 0; i < count; i++)
             {
@@ -4382,7 +4279,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Commit();
 
-            IndexSearcher s = new IndexSearcher(dir);
+            IndexSearcher s = new IndexSearcher(dir, false);
             PhraseQuery pq = new PhraseQuery();
             pq.Add(new Term("field", "a"));
             pq.Add(new Term("field", "b"));
@@ -4417,12 +4314,12 @@ namespace Lucene.Net.Index
             for (int i = 0; i < 23; i++)
                 AddDoc(writer);
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
 
             writer.PrepareCommit();
 
-            IndexReader reader2 = IndexReader.Open(dir);
+            IndexReader reader2 = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader2.NumDocs());
 
             writer.Commit();
@@ -4439,18 +4336,18 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual(23, reader3.NumDocs());
             reader3.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
 
             writer.PrepareCommit();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(23, reader.NumDocs());
             reader.Close();
 
             writer.Commit();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(40, reader.NumDocs());
             reader.Close();
             writer.Close();
@@ -4472,12 +4369,12 @@ namespace Lucene.Net.Index
             for (int i = 0; i < 23; i++)
                 AddDoc(writer);
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
 
             writer.PrepareCommit();
 
-            IndexReader reader2 = IndexReader.Open(dir);
+            IndexReader reader2 = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader2.NumDocs());
 
             writer.Rollback();
@@ -4495,18 +4392,18 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual(0, reader3.NumDocs());
             reader3.Close();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
 
             writer.PrepareCommit();
 
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
 
             writer.Commit();
-            reader = IndexReader.Open(dir);
+            reader = IndexReader.Open(dir, true);
             Assert.AreEqual(17, reader.NumDocs());
             reader.Close();
             writer.Close();
@@ -4524,7 +4421,7 @@ namespace Lucene.Net.Index
             writer.Commit();
             writer.Close();
 
-            IndexReader reader = IndexReader.Open(dir);
+            IndexReader reader = IndexReader.Open(dir, true);
             Assert.AreEqual(0, reader.NumDocs());
             reader.Close();
             dir.Close();
@@ -4532,7 +4429,7 @@ namespace Lucene.Net.Index
 
         private abstract class RunAddIndexesThreads
         {
-            private class AnonymousClassThread2 : SupportClass.ThreadClass
+            private class AnonymousClassThread2 : ThreadClass
             {
                 public AnonymousClassThread2(int numIter, RunAddIndexesThreads enclosingInstance)
                 {
@@ -4580,7 +4477,7 @@ namespace Lucene.Net.Index
             }
             private void InitBlock()
             {
-                threads = new SupportClass.ThreadClass[NUM_THREADS];
+                threads = new ThreadClass[NUM_THREADS];
             }
 
             internal Directory dir, dir2;
@@ -4591,7 +4488,7 @@ namespace Lucene.Net.Index
             internal IndexReader[] readers;
             internal int NUM_COPY;
             internal const int NUM_THREADS = 5;
-            internal SupportClass.ThreadClass[] threads;
+            internal ThreadClass[] threads;
             internal ConcurrentMergeScheduler cms;
 
             public RunAddIndexesThreads(int numCopy)
@@ -4610,12 +4507,12 @@ namespace Lucene.Net.Index
 
                 readers = new IndexReader[NUM_COPY];
                 for (int i = 0; i < NUM_COPY; i++)
-                    readers[i] = IndexReader.Open(dir);
+                    readers[i] = IndexReader.Open(dir, true);
             }
 
             internal virtual void LaunchThreads(int numIter)
             {
-                threads = new SupportClass.ThreadClass[NUM_THREADS]; //{{DIGY}} Should this be created somewhere else?
+                threads = new ThreadClass[NUM_THREADS]; //{{DIGY}} Should this be created somewhere else?
                 for (int i = 0; i < NUM_THREADS; i++)
                 {
                     threads[i] = new AnonymousClassThread2(numIter, this);
@@ -4684,7 +4581,8 @@ namespace Lucene.Net.Index
                 {
 
                     case 0:
-                        writer2.AddIndexes(dirs);
+                        writer2.AddIndexesNoOptimize(dirs);
+                        writer2.Optimize();
                         break;
 
                     case 1:
@@ -4726,7 +4624,7 @@ namespace Lucene.Net.Index
 
             _TestUtil.CheckIndex(c.dir2);
 
-            IndexReader reader = IndexReader.Open(c.dir2);
+            IndexReader reader = IndexReader.Open(c.dir2, true);
             Assert.AreEqual(100 + NUM_COPY * (3 * NUM_ITER / 4) * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_THREADS * Lucene.Net.Index.TestIndexWriter.CommitAndAddIndexes.NUM_INIT_DOCS, reader.NumDocs());
             reader.Close();
 
@@ -4816,7 +4714,8 @@ namespace Lucene.Net.Index
                 {
 
                     case 0:
-                        writer2.AddIndexes(dirs);
+                        writer2.AddIndexesNoOptimize(dirs);
+                        writer2.Optimize();
                         break;
 
                     case 1:
@@ -4956,7 +4855,7 @@ namespace Lucene.Net.Index
             try
             {
                 w.Rollback();
-                if (SupportClass.BuildType.Debug)
+                if (BuildType.Debug)
                     Assert.Fail("did not hit intentional RuntimeException");
                 else
                     Assert.Ignore("This test is not executed in release mode");
@@ -4993,7 +4892,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader ir = IndexReader.Open(dir);
+            IndexReader ir = IndexReader.Open(dir, true);
             doc = ir.Document(0);
             f = doc.GetField("binary");
             b = f.GetBinaryValue();
@@ -5004,63 +4903,6 @@ namespace Lucene.Net.Index
             dir.Close();
         }
 
-        // LUCENE-1374
-        [Test]
-        public virtual void TestMergeCompressedFields()
-        {
-            System.IO.FileInfo indexDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "mergecompressedfields"));
-            Directory dir = FSDirectory.Open(indexDir);
-            try
-            {
-                for (int i = 0; i < 5; i++)
-                {
-                    // Must make a new writer & doc each time, w/
-                    // different fields, so bulk merge of stored fields
-                    // cannot run:
-                    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), i == 0, IndexWriter.MaxFieldLength.UNLIMITED);
-                    try
-                    {
-                        w.SetMergeFactor(5);
-                        w.SetMergeScheduler(new SerialMergeScheduler());
-                        Document doc = new Document();
-                        doc.Add(new Field("test1", "this is some data that will be compressed this this this", Field.Store.COMPRESS, Field.Index.NO));
-                        doc.Add(new Field("test2", new byte[20], Field.Store.COMPRESS));
-                        doc.Add(new Field("field" + i, "random field", Field.Store.NO, Field.Index.ANALYZED));
-                        w.AddDocument(doc);
-                    }
-                    finally
-                    {
-                        w.Close();
-                    }
-                }
-
-                byte[] cmp = new byte[20];
-
-                IndexReader r = IndexReader.Open(dir);
-                try
-                {
-                    for (int i = 0; i < 5; i++)
-                    {
-                        Document doc = r.Document(i);
-                        Assert.AreEqual(doc.GetField("test1").StringValue(), "this is some data that will be compressed this this this");
-                        byte[] b = doc.GetField("test2").BinaryValue();
-                        Assert.AreEqual(b.Length, cmp.Length);
-                        for (int j = 0; j < b.Length; j++)
-                            Assert.AreEqual(b[j], cmp[j]);
-                    }
-                }
-                finally
-                {
-                    r.Close();
-                }
-            }
-            finally
-            {
-                dir.Close();
-                _TestUtil.RmDir(indexDir);
-            }
-        }
-
         // LUCENE-1382
         [Test]
         public virtual void TestCommitUserData()
@@ -5074,7 +4916,7 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual(0, IndexReader.GetCommitUserData(dir).Count);
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             // commit(Map) never called for this index
             Assert.AreEqual(0, r.GetCommitUserData().Count);
             r.Close();
@@ -5090,7 +4932,7 @@ namespace Lucene.Net.Index
 
             Assert.AreEqual("test1", IndexReader.GetCommitUserData(dir)["label"]);
 
-            r = IndexReader.Open(dir);
+            r = IndexReader.Open(dir, true);
             Assert.AreEqual("test1", r.GetCommitUserData()["label"]);
             r.Close();
 
@@ -5141,7 +4983,7 @@ namespace Lucene.Net.Index
 
             System.Collections.IList thrown = new System.Collections.ArrayList();
 
-            IndexWriter writer = new AnonymousClassIndexWriter(thrown, this, new MockRAMDirectory(), new StandardAnalyzer());
+            IndexWriter writer = new AnonymousClassIndexWriter(thrown, this, new MockRAMDirectory(), new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 
             // need to set an info stream so message is called
             writer.SetInfoStream(new System.IO.StreamWriter(new System.IO.MemoryStream()));
@@ -5174,7 +5016,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
 
             // Token "" occurred once
@@ -5208,7 +5050,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
             Assert.AreEqual(2, termOffsets.Length);
             Assert.AreEqual(0, termOffsets[0].GetStartOffset());
@@ -5232,7 +5074,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);
             TermVectorOffsetInfo[] termOffsets = ((TermPositionVector)r.GetTermFreqVector(0, "field")).GetOffsets(0);
             Assert.AreEqual(2, termOffsets.Length);
             Assert.AreEqual(0, termOffsets[0].GetStartOffset());
@@ -5258,7 +5100,7 @@ namespace Lucene.Net.Index
             w.AddDocument(doc);
             w.Close();
 
-            IndexReader r = IndexReader.Open(dir);
+            IndexReader r = IndexReader.Open(dir, true);

[... 457 lines stripped ...]

