lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r832486 [12/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...
Date Tue, 03 Nov 2009 18:06:38 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterDelete.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterDelete.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterDelete.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterDelete.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -22,19 +22,20 @@
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using ScoreDoc = Lucene.Net.Search.ScoreDoc;
 using TermQuery = Lucene.Net.Search.TermQuery;
-using Directory = Lucene.Net.Store.Directory;
-using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
-	[TestFixture]
-	public class TestIndexWriterDelete : LuceneTestCase
+	
+    [TestFixture]
+	public class TestIndexWriterDelete:LuceneTestCase
 	{
-		private class AnonymousClassFailure : MockRAMDirectory.Failure
+		private class AnonymousClassFailure:MockRAMDirectory.Failure
 		{
 			public AnonymousClassFailure(TestIndexWriterDelete enclosingInstance)
 			{
@@ -66,11 +67,11 @@
 				if (sawMaybe && !failed)
 				{
 					bool seen = false;
-					System.Diagnostics.StackFrame[] frames = new System.Diagnostics.StackTrace().GetFrames();
-					for (int i = 0; i < frames.Length; i++)
+					System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
+					for (int i = 0; i < trace.FrameCount; i++)
 					{
-						System.String methodName = frames[i].GetMethod().Name;
-						if ("ApplyDeletes".Equals(methodName))
+						System.Diagnostics.StackFrame sf = trace.GetFrame(i);
+						if ("ApplyDeletes".Equals(sf.GetMethod().Name))
 						{
 							seen = true;
 							break;
@@ -85,11 +86,11 @@
 				}
 				if (!failed)
 				{
-					System.Diagnostics.StackFrame[] frames = new System.Diagnostics.StackTrace().GetFrames();
-					for (int i = 0; i < frames.Length; i++)
+					System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
+					for (int i = 0; i < trace.FrameCount; i++)
 					{
-						System.String methodName = frames[i].GetMethod().Name;
-						if ("ApplyDeletes".Equals(methodName))
+						System.Diagnostics.StackFrame sf = trace.GetFrame(i);
+						if ("ApplyDeletes".Equals(sf.GetMethod().Name))
 						{
 							sawMaybe = true;
 							break;
@@ -98,8 +99,7 @@
 				}
 			}
 		}
-		
-		private class AnonymousClassFailure1 : MockRAMDirectory.Failure
+		private class AnonymousClassFailure1:MockRAMDirectory.Failure
 		{
 			public AnonymousClassFailure1(TestIndexWriterDelete enclosingInstance)
 			{
@@ -162,13 +162,13 @@
 					modifier.AddDocument(doc);
 				}
 				modifier.Optimize();
-                modifier.Commit();
+				modifier.Commit();
 				
 				Term term = new Term("city", "Amsterdam");
 				int hitCount = GetHitCount(dir, term);
 				Assert.AreEqual(1, hitCount);
 				modifier.DeleteDocuments(term);
-                modifier.Commit();
+				modifier.Commit();
 				hitCount = GetHitCount(dir, term);
 				Assert.AreEqual(0, hitCount);
 				
@@ -201,16 +201,16 @@
 				
 				Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
 				Assert.IsTrue(0 < modifier.GetSegmentCount());
-
-                modifier.Commit();
+				
+				modifier.Commit();
 				
 				IndexReader reader = IndexReader.Open(dir);
 				Assert.AreEqual(7, reader.NumDocs());
 				reader.Close();
 				
 				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-
-                modifier.Commit();
+				
+				modifier.Commit();
 				
 				reader = IndexReader.Open(dir);
 				Assert.AreEqual(0, reader.NumDocs());
@@ -220,53 +220,71 @@
 			}
 		}
 		
+		[Test]
+		public virtual void  TestMaxBufferedDeletes()
+		{
+			for (int pass = 0; pass < 2; pass++)
+			{
+				bool autoCommit = (0 == pass);
+				Directory dir = new MockRAMDirectory();
+				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				writer.SetMaxBufferedDeleteTerms(1);
+				writer.DeleteDocuments(new Term("foobar", "1"));
+				writer.DeleteDocuments(new Term("foobar", "1"));
+				writer.DeleteDocuments(new Term("foobar", "1"));
+				Assert.AreEqual(3, writer.GetFlushDeletesCount());
+				writer.Close();
+				dir.Close();
+			}
+		}
+		
 		// test when delete terms only apply to ram segments
 		[Test]
 		public virtual void  TestRAMDeletes()
 		{
 			for (int pass = 0; pass < 2; pass++)
 			{
-                for (int t = 0; t < 2; t++)
-                {
-                    bool autoCommit = (0 == pass);
-                    Directory dir = new MockRAMDirectory();
-                    IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-                    modifier.SetMaxBufferedDocs(4);
-                    modifier.SetMaxBufferedDeleteTerms(4);
-
-                    int id = 0;
-                    int value_Renamed = 100;
-
-                    AddDoc(modifier, ++id, value_Renamed);
-                    if (0 == t)
-                        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-                    else
-                        modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
-                    AddDoc(modifier, ++id, value_Renamed);
-                    if (0 == t)
-                    {
-                        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-                        Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
-                        Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
-                    }
-                    else
-                        modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
-
-                    AddDoc(modifier, ++id, value_Renamed);
-                    Assert.AreEqual(0, modifier.GetSegmentCount());
-                    modifier.Flush();
-
-                    modifier.Commit();
-
-                    IndexReader reader = IndexReader.Open(dir);
-                    Assert.AreEqual(1, reader.NumDocs());
-
-                    int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
-                    Assert.AreEqual(1, hitCount);
-                    reader.Close();
-                    modifier.Close();
-                    dir.Close();
-                }
+				for (int t = 0; t < 2; t++)
+				{
+					bool autoCommit = (0 == pass);
+					Directory dir = new MockRAMDirectory();
+					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+					modifier.SetMaxBufferedDocs(4);
+					modifier.SetMaxBufferedDeleteTerms(4);
+					
+					int id = 0;
+					int value_Renamed = 100;
+					
+					AddDoc(modifier, ++id, value_Renamed);
+					if (0 == t)
+						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+					else
+						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
+					AddDoc(modifier, ++id, value_Renamed);
+					if (0 == t)
+					{
+						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+						Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
+						Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
+					}
+					else
+						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
+					
+					AddDoc(modifier, ++id, value_Renamed);
+					Assert.AreEqual(0, modifier.GetSegmentCount());
+					modifier.Flush();
+					
+					modifier.Commit();
+					
+					IndexReader reader = IndexReader.Open(dir);
+					Assert.AreEqual(1, reader.NumDocs());
+					
+					int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
+					Assert.AreEqual(1, hitCount);
+					reader.Close();
+					modifier.Close();
+					dir.Close();
+				}
 			}
 		}
 		
@@ -303,8 +321,8 @@
 					AddDoc(modifier, ++id, value_Renamed);
 				}
 				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-
-                modifier.Commit();
+				
+				modifier.Commit();
 				
 				IndexReader reader = IndexReader.Open(dir);
 				Assert.AreEqual(5, reader.NumDocs());
@@ -340,8 +358,8 @@
 				id = 0;
 				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
 				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
-
-                modifier.Commit();
+				
+				modifier.Commit();
 				
 				reader = IndexReader.Open(dir);
 				Assert.AreEqual(5, reader.NumDocs());
@@ -353,8 +371,61 @@
 					terms[i] = new Term("id", System.Convert.ToString(++id));
 				}
 				modifier.DeleteDocuments(terms);
-                modifier.Commit();
-                reader = IndexReader.Open(dir);
+				modifier.Commit();
+				reader = IndexReader.Open(dir);
+				Assert.AreEqual(2, reader.NumDocs());
+				reader.Close();
+				
+				modifier.Close();
+				dir.Close();
+			}
+		}
+		
+		// test deleteAll()
+		[Test]
+		public virtual void  TestDeleteAll()
+		{
+			for (int pass = 0; pass < 2; pass++)
+			{
+				bool autoCommit = (0 == pass);
+				Directory dir = new MockRAMDirectory();
+				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+				modifier.SetMaxBufferedDocs(2);
+				modifier.SetMaxBufferedDeleteTerms(2);
+				
+				int id = 0;
+				int value_Renamed = 100;
+				
+				for (int i = 0; i < 7; i++)
+				{
+					AddDoc(modifier, ++id, value_Renamed);
+				}
+				modifier.Commit();
+				
+				IndexReader reader = IndexReader.Open(dir);
+				Assert.AreEqual(7, reader.NumDocs());
+				reader.Close();
+				
+				// Add 1 doc (so we will have something buffered)
+				AddDoc(modifier, 99, value_Renamed);
+				
+				// Delete all
+				modifier.DeleteAll();
+				
+				// Delete all shouldn't be on disk yet
+				reader = IndexReader.Open(dir);
+				Assert.AreEqual(7, reader.NumDocs());
+				reader.Close();
+				
+				// Add a doc and update a doc (after the deleteAll, before the commit)
+				AddDoc(modifier, 101, value_Renamed);
+				UpdateDoc(modifier, 102, value_Renamed);
+				
+				// commit the delete all
+				modifier.Commit();
+				
+				// Validate there are no docs left
+				reader = IndexReader.Open(dir);
 				Assert.AreEqual(2, reader.NumDocs());
 				reader.Close();
 				
@@ -363,9 +434,105 @@
 			}
 		}
 		
+		// test rollback of deleteAll()
+		[Test]
+		public virtual void  TestDeleteAllRollback()
+		{
+			Directory dir = new MockRAMDirectory();
+			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
+			modifier.SetMaxBufferedDocs(2);
+			modifier.SetMaxBufferedDeleteTerms(2);
+			
+			int id = 0;
+			int value_Renamed = 100;
+			
+			for (int i = 0; i < 7; i++)
+			{
+				AddDoc(modifier, ++id, value_Renamed);
+			}
+			modifier.Commit();
+			
+			AddDoc(modifier, ++id, value_Renamed);
+			
+			IndexReader reader = IndexReader.Open(dir);
+			Assert.AreEqual(7, reader.NumDocs());
+			reader.Close();
+			
+			// Delete all
+			modifier.DeleteAll();
+			
+			// Roll it back
+			modifier.Rollback();
+			modifier.Close();
+			
+			// Validate that the docs are still there
+			reader = IndexReader.Open(dir);
+			Assert.AreEqual(7, reader.NumDocs());
+			reader.Close();
+			
+			dir.Close();
+		}
+		
+		
+		// test deleteAll() w/ near real-time reader
+		[Test]
+		public virtual void  TestDeleteAllNRT()
+		{
+			Directory dir = new MockRAMDirectory();
+			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
+			modifier.SetMaxBufferedDocs(2);
+			modifier.SetMaxBufferedDeleteTerms(2);
+			
+			int id = 0;
+			int value_Renamed = 100;
+			
+			for (int i = 0; i < 7; i++)
+			{
+				AddDoc(modifier, ++id, value_Renamed);
+			}
+			modifier.Commit();
+			
+			IndexReader reader = modifier.GetReader();
+			Assert.AreEqual(7, reader.NumDocs());
+			reader.Close();
+			
+			AddDoc(modifier, ++id, value_Renamed);
+			AddDoc(modifier, ++id, value_Renamed);
+			
+			// Delete all
+			modifier.DeleteAll();
+			
+			reader = modifier.GetReader();
+			Assert.AreEqual(0, reader.NumDocs());
+			reader.Close();
+			
+			
+			// Roll it back
+			modifier.Rollback();
+			modifier.Close();
+			
+			// Validate that the docs are still there
+			reader = IndexReader.Open(dir);
+			Assert.AreEqual(7, reader.NumDocs());
+			reader.Close();
+			
+			dir.Close();
+		}
+		
+		
+		private void  UpdateDoc(IndexWriter modifier, int id, int value_Renamed)
+		{
+			Document doc = new Document();
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
+			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
+			doc.Add(new Field("value", System.Convert.ToString(value_Renamed), Field.Store.NO, Field.Index.NOT_ANALYZED));
+			modifier.UpdateDocument(new Term("id", System.Convert.ToString(id)), doc);
+		}
+		
+		
 		private void  AddDoc(IndexWriter modifier, int id, int value_Renamed)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
 			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
 			doc.Add(new Field("value", System.Convert.ToString(value_Renamed), Field.Store.NO, Field.Index.NOT_ANALYZED));
@@ -430,7 +597,7 @@
 				while (!done)
 				{
 					MockRAMDirectory dir = new MockRAMDirectory(startDir);
-                    dir.SetPreventDoubleWrite(false);
+					dir.SetPreventDoubleWrite(false);
 					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
 					
 					modifier.SetMaxBufferedDocs(1000); // use flush or close
@@ -446,7 +613,6 @@
 					{
 						
 						double rate = 0.1;
-						//UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1042'"
 						double diskRatio = ((double) diskFree) / diskUsage;
 						long thisDiskFree;
 						System.String testName;
@@ -531,12 +697,12 @@
 							}
 						}
 						
-
-                        // If the Close() succeeded, make sure there are no unreferenced files
-                        if (success)
-                            TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.Close");
-
-                        // Finally, verify index is not corrupt, and, if
+						// If the close() succeeded, make sure there are
+						// no unreferenced files.
+						if (success)
+							TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
+						
+						// Finally, verify index is not corrupt, and, if
 						// we succeeded, we see all docs changed, and if
 						// we failed, we see either all docs or no docs
 						// changed (transactional semantics):
@@ -643,11 +809,11 @@
 					modifier.AddDocument(doc);
 				}
 				// flush (and commit if ac)
-
-                modifier.Optimize();
-                modifier.Commit();
-
-                // one of the two files hits
+				
+				modifier.Optimize();
+				modifier.Commit();
+				
+				// one of the two files hits
 				
 				Term term = new Term("city", "Amsterdam");
 				int hitCount = GetHitCount(dir, term);
@@ -677,14 +843,14 @@
 				// in the !ac case, a new segments file won't be created but in
 				// this case, creation of the cfs file happens next so we need
 				// the doc (to test that it's okay that we don't lose deletes if
-				// failing while creating the cfs file
+				// failing while creating the cfs file)
 				
 				bool failed = false;
 				try
 				{
 					modifier.Commit();
 				}
-				catch (System.IO.IOException)
+				catch (System.IO.IOException ioe)
 				{
 					failed = true;
 				}
@@ -693,16 +859,16 @@
 				
 				// The commit above failed, so we need to retry it (which will
 				// succeed, because the failure is a one-shot)
-
-                modifier.Commit();
+				
+				modifier.Commit();
 				
 				hitCount = GetHitCount(dir, term);
 				
-				// Make sure the delete was successfully flushed
+				// Make sure the delete was successfully flushed:
 				Assert.AreEqual(0, hitCount);
 				
-                modifier.Close();
-                dir.Close();
+				modifier.Close();
+				dir.Close();
 			}
 		}
 		
@@ -741,19 +907,19 @@
 					{
 						modifier.AddDocument(doc);
 					}
-					catch (System.IO.IOException)
+					catch (System.IO.IOException io)
 					{
 						break;
 					}
 				}
 				
-				System.String[] startFiles = dir.List();
+				System.String[] startFiles = dir.ListAll();
 				SegmentInfos infos = new SegmentInfos();
 				infos.Read(dir);
 				new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
-				System.String[] endFiles = dir.List();
+				System.String[] endFiles = dir.ListAll();
 				
-				if (!SupportClass.Compare.CompareStringArrays(startFiles, endFiles))
+				if (!SupportClass.CollectionsHelper.CompareStringArrays(startFiles, endFiles))
 				{
 					Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
 				}

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterExceptions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterExceptions.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterExceptions.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterExceptions.cs Tue Nov  3 18:06:27 2009
@@ -1,13 +1,13 @@
-/**
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -15,243 +15,276 @@
  * limitations under the License.
  */
 
+using System;
+
 using NUnit.Framework;
 
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using _TestUtil = Lucene.Net.Util._TestUtil;
-using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
-using Directory = Lucene.Net.Store.Directory;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using CloseableThreadLocal = Lucene.Net.Util.CloseableThreadLocal;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
+	
     [TestFixture]
-    public class TestIndexWriterExceptions : LuceneTestCase
-    {
-
-        private const bool DEBUG = false;
-
-        private class IndexerThread : SupportClass.ThreadClass
-        {
-            private TestIndexWriterExceptions enclosingInstance;
-            internal IndexWriter writer;
-
-            internal readonly System.Random r = new System.Random(47);
-            internal System.Exception failure;
-
-            public IndexerThread(int i, IndexWriter writer, TestIndexWriterExceptions enclosingInstance)
-                : base("Indexer " + i)
-            {
-                this.writer = writer;
-                this.enclosingInstance = enclosingInstance;
-            }
-
-            override public void Run()
-            {
-
-                Document doc = new Document();
-
-                doc.Add(new Field("content1", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.ANALYZED));
-                doc.Add(new Field("content6", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-                doc.Add(new Field("content2", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NOT_ANALYZED));
-                doc.Add(new Field("content3", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NO));
-
-                doc.Add(new Field("content4", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED));
-                doc.Add(new Field("content5", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED));
-
-                doc.Add(new Field("content7", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-
-                Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
-                doc.Add(idField);
-
-                System.DateTime stopTime = System.DateTime.Now.AddSeconds(3);
-
-                while (System.DateTime.Now < stopTime)
-                {
-                    enclosingInstance.doFail.Set(this);
-                    string id = "" + r.Next(50);
-                    idField.SetValue(id);
-                    Term idTerm = new Term("id", id);
-                    try
-                    {
-                        writer.UpdateDocument(idTerm, doc);
-                    }
-                    catch (System.Exception re)
-                    {
-                        if (DEBUG)
-                        {
-                            System.Console.Out.WriteLine("EXC: ");
-                            System.Console.Out.WriteLine(re.StackTrace);
-                        }
-                        try
-                        {
-                            _TestUtil.CheckIndex(writer.GetDirectory());
-                        }
-                        catch (System.IO.IOException ioe)
-                        {
-                            System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread.Name + ": unexpected exception1");
-                            System.Console.Out.WriteLine(ioe.StackTrace);
-                            failure = ioe;
-                            break;
-                        }
-                        // this, in Java, was catch Throwable, and the catch above (at the same nesting level)
-                        // was catch RuntimeException... as all exceptions in C# are unchecked, these both come
-                        // down to System.Exception
-                        /*
-                    } catch (System.Exception t) {
-                      System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread.Name + ": unexpected exception2");
-                      System.Console.Out.WriteLine(t.StackTrace);
-                      failure = t;
-                      break;
-                         */
-                    }
-
-                    enclosingInstance.doFail.Set(null);
-
-                    // After a possible exception (above) I should be able
-                    // to add a new document without hitting an
-                    // exception:
-                    try
-                    {
-                        writer.UpdateDocument(idTerm, doc);
-                    }
-                    catch (System.Exception t)
-                    {
-                        System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread.Name + ": unexpected exception3");
-                        System.Console.Out.WriteLine(t.StackTrace);
-                        failure = t;
-                        break;
-                    }
-                }
-            }
-        }
-
-        CloseableThreadLocal doFail = new CloseableThreadLocal();
-
-        public class MockIndexWriter : IndexWriter
-        {
-            private TestIndexWriterExceptions enclosingInstance;
-
-            internal System.Random r = new System.Random(17);
-
-            public MockIndexWriter(Directory dir, Analyzer a, bool create, MaxFieldLength mfl, TestIndexWriterExceptions enclosingInstance)
-                : base(dir, a, create, mfl)
-            {
-                this.enclosingInstance = enclosingInstance;
-            }
-
-            protected override bool TestPoint(string name)
-            {
-                if (enclosingInstance.doFail.Get() != null && !name.Equals("startDoFlush") && r.Next(20) == 17)
-                {
-                    if (DEBUG)
-                    {
-                        System.Console.Out.WriteLine(System.Threading.Thread.CurrentThread.Name + ": NOW FAIL: " + name);
-                    }
-                    throw new System.Exception(System.Threading.Thread.CurrentThread.Name + ": intentionally failing at " + name);
-                }
-                return true;
-            }
-        }
-
-        [Test]
-        public void TestRandomExceptions()
-        {
-            MockRAMDirectory dir = new MockRAMDirectory();
-
-            MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, this);
-            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).SetSuppressExceptions_ForNUnitTest();
-            //writer.setMaxBufferedDocs(10);
-            writer.SetRAMBufferSizeMB(0.1);
-
-            if (DEBUG)
-                writer.SetInfoStream(System.Console.Out);
-
-            IndexerThread thread = new IndexerThread(0, writer, this);
-            thread.Run();
-            if (thread.failure != null)
-            {
-                System.Console.Out.WriteLine(thread.failure.StackTrace);
-                Assert.Fail("thread " + thread.Name + ": hit unexpected failure");
-            }
-
-            writer.Commit();
-
-            try
-            {
-                writer.Close();
-            }
-            catch (System.Exception t)
-            {
-                System.Console.Out.WriteLine("exception during close:");
-                System.Console.Out.WriteLine(t.StackTrace);
-                writer.Rollback();
-            }
-
-            // Confirm that when doc hits exception partway through tokenization, it's deleted:
-            IndexReader r2 = IndexReader.Open(dir);
-            int count = r2.DocFreq(new Term("content4", "aaa"));
-            int count2 = r2.DocFreq(new Term("content4", "ddd"));
-            Assert.AreEqual(count, count2);
-            r2.Close();
-
-            _TestUtil.CheckIndex(dir);
-        }
-
-        [Test]
-        public void TestRandomExceptionsThreads()
-        {
-
-            MockRAMDirectory dir = new MockRAMDirectory();
-            MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, this);
-            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).SetSuppressExceptions_ForNUnitTest();
-            //writer.setMaxBufferedDocs(10);
-            writer.SetRAMBufferSizeMB(0.2);
-
-            if (DEBUG)
-                writer.SetInfoStream(System.Console.Out);
-
-            int NUM_THREADS = 4;
-
-            IndexerThread[] threads = new IndexerThread[NUM_THREADS];
-            for (int i = 0; i < NUM_THREADS; i++)
-            {
-                threads[i] = new IndexerThread(i, writer, this);
-                threads[i].Start();
-            }
-
-            for (int i = 0; i < NUM_THREADS; i++)
-                threads[i].Join();
-
-            for (int i = 0; i < NUM_THREADS; i++)
-                if (threads[i].failure != null)
-                    Assert.Fail("thread " + threads[i].Name + ": hit unexpected failure");
-
-            writer.Commit();
-
-            try
-            {
-                writer.Close();
-            }
-            catch (System.Exception t)
-            {
-                System.Console.Out.WriteLine("exception during close:");
-                System.Console.Out.WriteLine(t.StackTrace);
-                writer.Rollback();
-            }
-
-            // Confirm that when doc hits exception partway through tokenization, it's deleted:
-            IndexReader r2 = IndexReader.Open(dir);
-            int count = r2.DocFreq(new Term("content4", "aaa"));
-            int count2 = r2.DocFreq(new Term("content4", "ddd"));
-            Assert.AreEqual(count, count2);
-            r2.Close();
-
-            _TestUtil.CheckIndex(dir);
-        }
-    }
-}
+	public class TestIndexWriterExceptions:LuceneTestCase
+	{
+		
+		private const bool DEBUG = false;
+		
+		private class IndexerThread:SupportClass.ThreadClass
+		{
+			private void  InitBlock(TestIndexWriterExceptions enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestIndexWriterExceptions enclosingInstance;
+			public TestIndexWriterExceptions Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			internal IndexWriter writer;
+			
+			internal System.Random r = new System.Random((System.Int32) 47);
+			internal System.Exception failure;
+			
+			public IndexerThread(TestIndexWriterExceptions enclosingInstance, int i, IndexWriter writer)
+			{
+				InitBlock(enclosingInstance);
+				Name = "Indexer " + i;
+				this.writer = writer;
+			}
+			
+			override public void  Run()
+			{
+				
+				Document doc = new Document();
+				
+				doc.Add(new Field("content1", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.ANALYZED));
+				doc.Add(new Field("content6", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+				doc.Add(new Field("content2", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("content3", "aaa bbb ccc ddd", Field.Store.YES, Field.Index.NO));
+				
+				doc.Add(new Field("content4", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.ANALYZED));
+				doc.Add(new Field("content5", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED));
+				
+				doc.Add(new Field("content7", "aaa bbb ccc ddd", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+				
+				Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
+				doc.Add(idField);
+				
+				System.DateTime stopTime = System.DateTime.Now.AddSeconds(3);
+				
+				while (System.DateTime.Now < stopTime)
+				{
+					System.Threading.Thread.SetData(Enclosing_Instance.doFail, this);
+					System.String id = "" + r.Next(50);
+					idField.SetValue(id);
+					Term idTerm = new Term("id", id);
+					try
+					{
+						writer.UpdateDocument(idTerm, doc);
+					}
+					catch (System.SystemException re)
+					{
+						if (Lucene.Net.Index.TestIndexWriterExceptions.DEBUG)
+						{
+							System.Console.Out.WriteLine("EXC: ");
+							System.Console.Out.WriteLine(re.StackTrace);
+						}
+						try
+						{
+							_TestUtil.CheckIndex(writer.GetDirectory());
+						}
+						catch (System.IO.IOException ioe)
+						{
+							System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": unexpected exception1");
+							System.Console.Out.WriteLine(ioe.StackTrace);
+							failure = ioe;
+							break;
+						}
+					}
+					catch (System.Exception t)
+					{
+						System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": unexpected exception2");
+						System.Console.Out.WriteLine(t.StackTrace);
+						failure = t;
+						break;
+					}
+					
+					System.Threading.Thread.SetData(Enclosing_Instance.doFail, null);
+					
+					// After a possible exception (above) I should be able
+					// to add a new document without hitting an
+					// exception:
+					try
+					{
+						writer.UpdateDocument(idTerm, doc);
+					}
+					catch (System.Exception t)
+					{
+						System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": unexpected exception3");
+						System.Console.Out.WriteLine(t.StackTrace);
+						failure = t;
+						break;
+					}
+				}
+			}
+		}
+		
+		internal System.LocalDataStoreSlot doFail = System.Threading.Thread.AllocateDataSlot();
+		
+		public class MockIndexWriter:IndexWriter
+		{
+			private void  InitBlock(TestIndexWriterExceptions enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestIndexWriterExceptions enclosingInstance;
+			public TestIndexWriterExceptions Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal System.Random r = new System.Random((System.Int32) 17);
+			
+			public MockIndexWriter(TestIndexWriterExceptions enclosingInstance, Directory dir, Analyzer a, bool create, MaxFieldLength mfl):base(dir, a, create, mfl)
+			{
+				InitBlock(enclosingInstance);
+			}
+			
+			public /*internal*/ override bool TestPoint(System.String name)
+			{
+				if (System.Threading.Thread.GetData(Enclosing_Instance.doFail) != null && !name.Equals("startDoFlush") && r.Next(20) == 17)
+				{
+					if (Lucene.Net.Index.TestIndexWriterExceptions.DEBUG)
+					{
+						System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": NOW FAIL: " + name);
+						//new Throwable().printStackTrace(System.out);
+					}
+					throw new System.SystemException(SupportClass.ThreadClass.Current().Name + ": intentionally failing at " + name);
+				}
+				return true;
+			}
+		}
+		
+		[Test]
+		public virtual void  TestRandomExceptions()
+		{
+			MockRAMDirectory dir = new MockRAMDirectory();
+			
+			MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
+			//writer.setMaxBufferedDocs(10);
+			writer.SetRAMBufferSizeMB(0.1);
+			
+			if (DEBUG)
+			{
+				System.IO.StreamWriter temp_writer;
+				temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
+				temp_writer.AutoFlush = true;
+				writer.SetInfoStream(temp_writer);
+			}
+			
+			IndexerThread thread = new IndexerThread(this, 0, writer);
+			thread.Run();
+			if (thread.failure != null)
+			{
+				System.Console.Out.WriteLine(thread.failure.StackTrace);
+				Assert.Fail("thread " + thread.Name + ": hit unexpected failure");
+			}
+			
+			writer.Commit();
+			
+			try
+			{
+				writer.Close();
+			}
+			catch (System.Exception t)
+			{
+				System.Console.Out.WriteLine("exception during close:");
+				System.Console.Out.WriteLine(t.StackTrace);
+				writer.Rollback();
+			}
+			
+			// Confirm that when doc hits exception partway through tokenization, it's deleted:
+			IndexReader r2 = IndexReader.Open(dir);
+			int count = r2.DocFreq(new Term("content4", "aaa"));
+			int count2 = r2.DocFreq(new Term("content4", "ddd"));
+			Assert.AreEqual(count, count2);
+			r2.Close();
+			
+			_TestUtil.CheckIndex(dir);
+		}
+		
+		[Test]
+		public virtual void  TestRandomExceptionsThreads()
+		{
+			
+			MockRAMDirectory dir = new MockRAMDirectory();
+			MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
+			//writer.setMaxBufferedDocs(10);
+			writer.SetRAMBufferSizeMB(0.2);
+			
+			if (DEBUG)
+			{
+				System.IO.StreamWriter temp_writer;
+				temp_writer = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Console.Out.Encoding);
+				temp_writer.AutoFlush = true;
+				writer.SetInfoStream(temp_writer);
+			}
+			
+			int NUM_THREADS = 4;
+			
+			IndexerThread[] threads = new IndexerThread[NUM_THREADS];
+			for (int i = 0; i < NUM_THREADS; i++)
+			{
+				threads[i] = new IndexerThread(this, i, writer);
+				threads[i].Start();
+			}
+			
+			for (int i = 0; i < NUM_THREADS; i++)
+				threads[i].Join();
+			
+			for (int i = 0; i < NUM_THREADS; i++)
+				if (threads[i].failure != null)
+					Assert.Fail("thread " + threads[i].Name + ": hit unexpected failure");
+			
+			writer.Commit();
+			
+			try
+			{
+				writer.Close();
+			}
+			catch (System.Exception t)
+			{
+				System.Console.Out.WriteLine("exception during close:");
+				System.Console.Out.WriteLine(t.StackTrace);
+				writer.Rollback();
+			}
+			
+			// Confirm that when doc hits exception partway through tokenization, it's deleted:
+			IndexReader r2 = IndexReader.Open(dir);
+			int count = r2.DocFreq(new Term("content4", "aaa"));
+			int count2 = r2.DocFreq(new Term("content4", "ddd"));
+			Assert.AreEqual(count, count2);
+			r2.Close();
+			
+			_TestUtil.CheckIndex(dir);
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterLockRelease.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterLockRelease.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterLockRelease.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterLockRelease.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -23,6 +23,7 @@
 
 namespace Lucene.Net.Index
 {
+	
 	/// <summary> This tests the patch for issue #LUCENE-715 (IndexWriter does not
 	/// release its write lock when trying to open an index which does not yet
 	/// exist).
@@ -31,18 +32,18 @@
 	/// <version>  $Id$
 	/// </version>
 	
-	[TestFixture]
-	public class TestIndexWriterLockRelease : LuceneTestCase
+    [TestFixture]
+	public class TestIndexWriterLockRelease:LuceneTestCase
 	{
 		private System.IO.FileInfo __test_dir;
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			if (this.__test_dir == null)
 			{
-                System.String tmp_dir = SupportClass.AppSettings.Get("tempDir", "");
+				System.String tmp_dir = SupportClass.AppSettings.Get("java.io.tmpdir", "tmp");
 				this.__test_dir = new System.IO.FileInfo(System.IO.Path.Combine(tmp_dir, "testIndexWriter"));
 				
 				bool tmpBool;
@@ -73,10 +74,10 @@
 		}
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			base.TearDown();
-            if (this.__test_dir != null && System.IO.Directory.Exists(this.__test_dir.FullName))
+			if (this.__test_dir != null)
 			{
 				System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(this.__test_dir);
 				
@@ -122,7 +123,7 @@
 		}
 		
 		[Test]
-		public virtual void  _TestIndexWriterLockRelease()
+		public virtual void  TestIndexWriterLockRelease_Renamed()
 		{
 			IndexWriter im;
 			
@@ -130,13 +131,13 @@
 			{
 				im = new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 			}
-			catch (System.IO.FileNotFoundException)
+			catch (System.IO.FileNotFoundException e)
 			{
 				try
 				{
-                    im = new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+					im = new IndexWriter(this.__test_dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 				}
-				catch (System.IO.FileNotFoundException)
+				catch (System.IO.FileNotFoundException e1)
 				{
 				}
 			}

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMergePolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterMergePolicy.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMergePolicy.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMergePolicy.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,19 +19,19 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using _TestUtil = Lucene.Net.Util._TestUtil;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
-	[TestFixture]
-	public class TestIndexWriterMergePolicy : LuceneTestCase
+    [TestFixture]
+	public class TestIndexWriterMergePolicy:LuceneTestCase
 	{
 		
 		// Test the normal case
@@ -39,11 +39,11 @@
 		public virtual void  TestNormalCase()
 		{
 			Directory dir = new RAMDirectory();
-
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(10);
-			writer.SetMergePolicy(new LogDocMergePolicy());
+			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			
 			for (int i = 0; i < 100; i++)
 			{
@@ -59,11 +59,11 @@
 		public virtual void  TestNoOverMerge()
 		{
 			Directory dir = new RAMDirectory();
-
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(10);
-			writer.SetMergePolicy(new LogDocMergePolicy());
+			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			
 			bool noOverMerge = false;
 			for (int i = 0; i < 100; i++)
@@ -80,16 +80,16 @@
 			writer.Close();
 		}
 		
-		// Test the case where flush is forced after every AddDoc
+		// Test the case where flush is forced after every addDoc
 		[Test]
 		public virtual void  TestForceFlush()
 		{
 			Directory dir = new RAMDirectory();
-
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(10);
-			LogDocMergePolicy mp = new LogDocMergePolicy();
+			LogDocMergePolicy mp = new LogDocMergePolicy(writer);
 			mp.SetMinMergeDocs(100);
 			writer.SetMergePolicy(mp);
 			
@@ -97,10 +97,10 @@
 			{
 				AddDoc(writer);
 				writer.Close();
-
-                writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+				
+				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 				writer.SetMaxBufferedDocs(10);
-				writer.SetMergeFactor(10);
+				writer.SetMergePolicy(mp);
 				mp.SetMinMergeDocs(100);
 				writer.SetMergeFactor(10);
 				CheckInvariants(writer);
@@ -114,11 +114,11 @@
 		public virtual void  TestMergeFactorChange()
 		{
 			Directory dir = new RAMDirectory();
-
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(100);
-			writer.SetMergePolicy(new LogDocMergePolicy());
+			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			
 			for (int i = 0; i < 250; i++)
 			{
@@ -144,11 +144,11 @@
 		public virtual void  TestMaxBufferedDocsChange()
 		{
 			Directory dir = new RAMDirectory();
-
-            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), true);
 			writer.SetMaxBufferedDocs(101);
 			writer.SetMergeFactor(101);
-			writer.SetMergePolicy(new LogDocMergePolicy());
+			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			
 			// leftmost* segment has 1 doc
 			// rightmost* segment has 100 docs
@@ -160,11 +160,11 @@
 					CheckInvariants(writer);
 				}
 				writer.Close();
-
-                writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+				
+				writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), false);
 				writer.SetMaxBufferedDocs(101);
 				writer.SetMergeFactor(101);
-				writer.SetMergePolicy(new LogDocMergePolicy());
+				writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			}
 			
 			writer.SetMaxBufferedDocs(10);
@@ -194,7 +194,7 @@
 			Directory dir = new RAMDirectory();
 			
 			IndexWriter writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), true);
-			writer.SetMergePolicy(new LogDocMergePolicy());
+			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(100);
 			
@@ -210,7 +210,7 @@
 			reader.Close();
 			
 			writer = new IndexWriter(dir, true, new WhitespaceAnalyzer(), false);
-			writer.SetMergePolicy(new LogDocMergePolicy());
+			writer.SetMergePolicy(new LogDocMergePolicy(writer));
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(5);
 			
@@ -227,7 +227,7 @@
 		
 		private void  AddDoc(IndexWriter writer)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}
@@ -277,7 +277,7 @@
 				Assert.IsTrue(numSegments < mergeFactor);
 			}
 			
-			System.String[] files = writer.GetDirectory().List();
+			System.String[] files = writer.GetDirectory().ListAll();
 			int segmentCfsCount = 0;
 			for (int i = 0; i < files.Length; i++)
 			{
@@ -289,14 +289,15 @@
 			Assert.AreEqual(segmentCount, segmentCfsCount);
 		}
 		
-		private void  PrintSegmentDocCounts(IndexWriter writer)
-		{
-			int segmentCount = writer.GetSegmentCount();
-			System.Console.Out.WriteLine("" + segmentCount + " segments total");
-			for (int i = 0; i < segmentCount; i++)
-			{
-				System.Console.Out.WriteLine("  segment " + i + " has " + writer.GetDocCount(i) + " docs");
-			}
+		/*
+		private void printSegmentDocCounts(IndexWriter writer) {
+		int segmentCount = writer.getSegmentCount();
+		System.out.println("" + segmentCount + " segments total");
+		for (int i = 0; i < segmentCount; i++) {
+		System.out.println("  segment " + i + " has " + writer.getDocCount(i)
+		+ " docs");
+		}
 		}
+		*/
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMerging.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterMerging.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMerging.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterMerging.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,19 +19,19 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
 	
-	[TestFixture]
-	public class TestIndexWriterMerging : LuceneTestCase
+    [TestFixture]
+	public class TestIndexWriterMerging:LuceneTestCase
 	{
 		
 		/// <summary> Tests that index merging (specifically addIndexes()) doesn't
@@ -42,7 +42,7 @@
 		{
 			
 			int num = 100;
-
+			
 			Directory indexA = new MockRAMDirectory();
 			Directory indexB = new MockRAMDirectory();
 			
@@ -59,7 +59,7 @@
 			{
 				Assert.Fail("Index b is invalid");
 			}
-
+			
 			Directory merged = new MockRAMDirectory();
 			
 			IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
@@ -82,7 +82,7 @@
 			int max = reader.MaxDoc();
 			for (int i = 0; i < max; i++)
 			{
-				Lucene.Net.Documents.Document temp = reader.Document(i);
+				Document temp = reader.Document(i);
 				//System.out.println("doc "+i+"="+temp.getField("count").stringValue());
 				//compare the index doc number to the value that it should be
 				if (!temp.GetField("count").StringValue().Equals((i + startAt) + ""))
@@ -97,14 +97,14 @@
 		
 		private void  FillIndex(Directory dir, int start, int numDocs)
 		{
-
-            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMergeFactor(2);
 			writer.SetMaxBufferedDocs(2);
 			
 			for (int i = start; i < (start + numDocs); i++)
 			{
-				Lucene.Net.Documents.Document temp = new Lucene.Net.Documents.Document();
+				Document temp = new Document();
 				temp.Add(new Field("count", ("" + i), Field.Store.YES, Field.Index.NOT_ANALYZED));
 				
 				writer.AddDocument(temp);

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterReader.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterReader.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterReader.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,1060 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Index = Lucene.Net.Documents.Field.Index;
+using Store = Lucene.Net.Documents.Field.Store;
+using TermVector = Lucene.Net.Documents.Field.TermVector;
+using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using Query = Lucene.Net.Search.Query;
+using TermQuery = Lucene.Net.Search.TermQuery;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
+
+namespace Lucene.Net.Index
+{
+	
+    [TestFixture]
+	public class TestIndexWriterReader:LuceneTestCase
+	{
+		private class AnonymousClassThread:SupportClass.ThreadClass
+		{
+			public AnonymousClassThread(long endTime, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.Directory[] dirs, System.Collections.IList excs, TestIndexWriterReader enclosingInstance)
+			{
+				InitBlock(endTime, writer, dirs, excs, enclosingInstance);
+			}
+			private void  InitBlock(long endTime, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.Directory[] dirs, System.Collections.IList excs, TestIndexWriterReader enclosingInstance)
+			{
+				this.endTime = endTime;
+				this.writer = writer;
+				this.dirs = dirs;
+				this.excs = excs;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private long endTime;
+			private Lucene.Net.Index.IndexWriter writer;
+			private Lucene.Net.Store.Directory[] dirs;
+			private System.Collections.IList excs;
+			private TestIndexWriterReader enclosingInstance;
+			public TestIndexWriterReader Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			override public void  Run()
+			{
+				while (System.DateTime.Now.Millisecond < endTime)
+				{
+					try
+					{
+						writer.AddIndexesNoOptimize(dirs);
+					}
+					catch (System.Exception t)
+					{
+						excs.Add(t);
+						throw new System.SystemException("", t);
+					}
+				}
+			}
+		}
+		private class AnonymousClassThread1:SupportClass.ThreadClass
+		{
+			public AnonymousClassThread1(long endTime, Lucene.Net.Index.IndexWriter writer, System.Collections.IList excs, TestIndexWriterReader enclosingInstance)
+			{
+				InitBlock(endTime, writer, excs, enclosingInstance);
+			}
+			private void  InitBlock(long endTime, Lucene.Net.Index.IndexWriter writer, System.Collections.IList excs, TestIndexWriterReader enclosingInstance)
+			{
+				this.endTime = endTime;
+				this.writer = writer;
+				this.excs = excs;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private long endTime;
+			private Lucene.Net.Index.IndexWriter writer;
+			private System.Collections.IList excs;
+			private TestIndexWriterReader enclosingInstance;
+			public TestIndexWriterReader Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			override public void  Run()
+			{
+				int count = 0;
+				System.Random r = new System.Random();
+				while (System.DateTime.Now.Millisecond < endTime)
+				{
+					try
+					{
+						for (int i = 0; i < 10; i++)
+						{
+							writer.AddDocument(Lucene.Net.Index.TestIndexWriterReader.CreateDocument(10 * count + i, "test", 4));
+						}
+						count++;
+						int limit = count * 10;
+						for (int i = 0; i < 5; i++)
+						{
+							int x = r.Next(limit);
+							writer.DeleteDocuments(new Term("field3", "b" + x));
+						}
+					}
+					catch (System.Exception t)
+					{
+						excs.Add(t);
+						throw new System.SystemException("", t);
+					}
+				}
+			}
+		}
+		internal static System.IO.StreamWriter infoStream;
+		
+		public class HeavyAtomicInt
+		{
+			private int value_Renamed;
+			public HeavyAtomicInt(int start)
+			{
+				value_Renamed = start;
+			}
+			public virtual int AddAndGet(int inc)
+			{
+				lock (this)
+				{
+					value_Renamed += inc;
+					return value_Renamed;
+				}
+			}
+			public virtual int IncrementAndGet()
+			{
+				lock (this)
+				{
+					value_Renamed++;
+					return value_Renamed;
+				}
+			}
+			public virtual int IntValue()
+			{
+				lock (this)
+				{
+					return value_Renamed;
+				}
+			}
+		}
+		
+		public static int Count(Term t, IndexReader r)
+		{
+			int count = 0;
+			TermDocs td = r.TermDocs(t);
+			while (td.Next())
+			{
+				td.Doc();
+				count++;
+			}
+			td.Close();
+			return count;
+		}
+		
+		[Test]
+		public virtual void  TestUpdateDocument()
+		{
+			bool optimize = true;
+			
+			Directory dir1 = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			
+			// create the index
+			CreateIndexNoClose(!optimize, "index1", writer);
+			
+			// writer.flush(false, true, true);
+			
+			// get a reader
+			IndexReader r1 = writer.GetReader();
+			
+			System.String id10 = r1.Document(10).GetField("id").StringValue();
+			
+			Document newDoc = r1.Document(10);
+			newDoc.RemoveField("id");
+			newDoc.Add(new Field("id", System.Convert.ToString(8000), Field.Store.YES, Field.Index.NOT_ANALYZED));
+			writer.UpdateDocument(new Term("id", id10), newDoc);
+			
+			IndexReader r2 = writer.GetReader();
+			Assert.AreEqual(0, Count(new Term("id", id10), r2));
+			Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r2));
+			
+			r1.Close();
+			r2.Close();
+			writer.Close();
+			
+			IndexReader r3 = IndexReader.Open(dir1);
+			Assert.AreEqual(0, Count(new Term("id", id10), r3));
+			Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r3));
+			r3.Close();
+			
+			dir1.Close();
+		}
+		
+		/// <summary> Test using IW.addIndexes
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+		public virtual void  TestAddIndexes()
+		{
+			bool optimize = false;
+			
+			Directory dir1 = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetInfoStream(infoStream);
+			// create the index
+			CreateIndexNoClose(!optimize, "index1", writer);
+			writer.Flush(false, true, true);
+			
+			// create a 2nd index
+			Directory dir2 = new MockRAMDirectory();
+			IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer2.SetInfoStream(infoStream);
+			CreateIndexNoClose(!optimize, "index2", writer2);
+			writer2.Close();
+			
+			writer.AddIndexesNoOptimize(new Directory[]{dir2});
+			
+			IndexReader r1 = writer.GetReader();
+			Assert.AreEqual(200, r1.MaxDoc());
+			
+			int index2df = r1.DocFreq(new Term("indexname", "index2"));
+			
+			Assert.AreEqual(100, index2df);
+			
+			// verify the docs are from different indexes
+			Document doc5 = r1.Document(5);
+			Assert.AreEqual("index1", doc5.Get("indexname"));
+			Document doc150 = r1.Document(150);
+			Assert.AreEqual("index2", doc150.Get("indexname"));
+			r1.Close();
+			writer.Close();
+			dir1.Close();
+		}
+		
+		[Test]
+		public virtual void  TestAddIndexes2()
+		{
+			bool optimize = false;
+			
+			Directory dir1 = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetInfoStream(infoStream);
+			
+			// create a 2nd index
+			Directory dir2 = new MockRAMDirectory();
+			IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer2.SetInfoStream(infoStream);
+			CreateIndexNoClose(!optimize, "index2", writer2);
+			writer2.Close();
+			
+			writer.AddIndexesNoOptimize(new Directory[]{dir2});
+			writer.AddIndexesNoOptimize(new Directory[]{dir2});
+			writer.AddIndexesNoOptimize(new Directory[]{dir2});
+			writer.AddIndexesNoOptimize(new Directory[]{dir2});
+			writer.AddIndexesNoOptimize(new Directory[]{dir2});
+			
+			IndexReader r1 = writer.GetReader();
+			Assert.AreEqual(500, r1.MaxDoc());
+			
+			r1.Close();
+			writer.Close();
+			dir1.Close();
+		}
+		
+		/// <summary> Deletes using IW.deleteDocuments
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+		public virtual void  TestDeleteFromIndexWriter()
+		{
+			bool optimize = true;
+			
+			Directory dir1 = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetInfoStream(infoStream);
+			// create the index
+			CreateIndexNoClose(!optimize, "index1", writer);
+			writer.Flush(false, true, true);
+			// get a reader
+			IndexReader r1 = writer.GetReader();
+			
+			System.String id10 = r1.Document(10).GetField("id").StringValue();
+			
+			// deleted IW docs should not show up in the next getReader
+			writer.DeleteDocuments(new Term("id", id10));
+			IndexReader r2 = writer.GetReader();
+			Assert.AreEqual(1, Count(new Term("id", id10), r1));
+			Assert.AreEqual(0, Count(new Term("id", id10), r2));
+			
+			System.String id50 = r1.Document(50).GetField("id").StringValue();
+			Assert.AreEqual(1, Count(new Term("id", id50), r1));
+			
+			writer.DeleteDocuments(new Term("id", id50));
+			
+			IndexReader r3 = writer.GetReader();
+			Assert.AreEqual(0, Count(new Term("id", id10), r3));
+			Assert.AreEqual(0, Count(new Term("id", id50), r3));
+			
+			System.String id75 = r1.Document(75).GetField("id").StringValue();
+			writer.DeleteDocuments(new TermQuery(new Term("id", id75)));
+			IndexReader r4 = writer.GetReader();
+			Assert.AreEqual(1, Count(new Term("id", id75), r3));
+			Assert.AreEqual(0, Count(new Term("id", id75), r4));
+			
+			r1.Close();
+			r2.Close();
+			r3.Close();
+			r4.Close();
+			writer.Close();
+			
+			// reopen the writer to verify the delete made it to the directory
+			writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetInfoStream(infoStream);
+			IndexReader w2r1 = writer.GetReader();
+			Assert.AreEqual(0, Count(new Term("id", id10), w2r1));
+			w2r1.Close();
+			writer.Close();
+			dir1.Close();
+		}
+		
		/// <summary> Spawns AddDirectoriesThreads to concurrently add pre-built indexes
		/// into a single main writer, then checks that the final document count in
		/// the main index matches the count the helper threads accumulated.
		/// </summary>
		[Test]
		public virtual void  TestAddIndexesAndDoDeletesThreads()
		{
			int numIter = 5;
			int numDirs = 3;
			
			Directory mainDir = new MockRAMDirectory();
			IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			mainWriter.SetInfoStream(infoStream);
			// NOTE(review): numIter is passed to the constructor parameter named
			// numDirs, and numDirs to LaunchThreads' parameter named numIter; this
			// mirrors the upstream Java test but looks swapped -- confirm intent.
			AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(this, numIter, mainWriter);
			addDirThreads.LaunchThreads(numDirs);
			addDirThreads.JoinThreads();
			
			//Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.NUM_THREADS
			//    * addDirThreads.NUM_INIT_DOCS, addDirThreads.mainWriter.numDocs());
			// The writer's live doc count must agree with the counter the threads maintained.
			Assert.AreEqual(addDirThreads.count.IntValue(), addDirThreads.mainWriter.NumDocs());
			
			addDirThreads.Close(true);
			
			// no worker thread may have recorded an exception
			Assert.IsTrue(addDirThreads.failures.Count == 0);
			
			_TestUtil.CheckIndex(mainDir);
			
			// reopen from the directory to verify the count survived the close
			IndexReader reader = IndexReader.Open(mainDir);
			Assert.AreEqual(addDirThreads.count.IntValue(), reader.NumDocs());
			//Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.NUM_THREADS
			//    * addDirThreads.NUM_INIT_DOCS, reader.numDocs());
			reader.Close();
			
			addDirThreads.CloseDir();
			mainDir.Close();
		}
+		
+		private class DeleteThreads
+		{
+			private class AnonymousClassThread2:SupportClass.ThreadClass
+			{
+				public AnonymousClassThread2(DeleteThreads enclosingInstance)
+				{
+					InitBlock(enclosingInstance);
+				}
+				private void  InitBlock(DeleteThreads enclosingInstance)
+				{
+					this.enclosingInstance = enclosingInstance;
+				}
+				private DeleteThreads enclosingInstance;
+				public DeleteThreads Enclosing_Instance
+				{
+					get
+					{
+						return enclosingInstance;
+					}
+					
+				}
+				override public void  Run()
+				{
+					try
+					{
+						Term term = Enclosing_Instance.GetDeleteTerm();
+						Enclosing_Instance.mainWriter.DeleteDocuments(term);
+						lock (Enclosing_Instance.deletedTerms.SyncRoot)
+						{
+							Enclosing_Instance.deletedTerms.Add(term);
+						}
+					}
+					catch (System.Exception t)
+					{
+						Enclosing_Instance.Handle(t);
+					}
+				}
+			}
+			private void  InitBlock(TestIndexWriterReader enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+				threads = new SupportClass.ThreadClass[NUM_THREADS];
+			}
+			private TestIndexWriterReader enclosingInstance;
+			public TestIndexWriterReader Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal const int NUM_THREADS = 5;
+			internal SupportClass.ThreadClass[] threads;
+			internal IndexWriter mainWriter;
+			internal System.Collections.IList deletedTerms = new System.Collections.ArrayList();
+			internal System.Collections.ArrayList toDeleteTerms = new System.Collections.ArrayList();
+			internal System.Random random;
+			internal System.Collections.IList failures = new System.Collections.ArrayList();
+			
+			public DeleteThreads(TestIndexWriterReader enclosingInstance, IndexWriter mainWriter)
+			{
+				InitBlock(enclosingInstance);
+				this.mainWriter = mainWriter;
+				IndexReader reader = mainWriter.GetReader();
+				int maxDoc = reader.MaxDoc();
+				random = Enclosing_Instance.NewRandom();
+				int iter = random.Next(maxDoc);
+				for (int x = 0; x < iter; x++)
+				{
+					int doc = random.Next(iter);
+					System.String id = reader.Document(doc).Get("id");
+					toDeleteTerms.Add(new Term("id", id));
+				}
+			}
+			
+			internal virtual Term GetDeleteTerm()
+			{
+				lock (toDeleteTerms.SyncRoot)
+				{
+					System.Object tempObject;
+					tempObject = toDeleteTerms[0];
+					toDeleteTerms.RemoveAt(0);
+					return (Term) tempObject;
+				}
+			}
+			
+			internal virtual void  LaunchThreads(int numIter)
+			{
+				for (int i = 0; i < NUM_THREADS; i++)
+				{
+					threads[i] = new AnonymousClassThread2(this);
+				}
+			}
+			
+			internal virtual void  Handle(System.Exception t)
+			{
+				System.Console.Out.WriteLine(t.StackTrace);
+				lock (failures.SyncRoot)
+				{
+					failures.Add(t);
+				}
+			}
+			
+			internal virtual void  JoinThreads()
+			{
+				for (int i = 0; i < NUM_THREADS; i++)
+					try
+					{
+						threads[i].Join();
+					}
+					catch (System.Threading.ThreadInterruptedException ie)
+					{
+						SupportClass.ThreadClass.Current().Interrupt();
+					}
+			}
+		}
+		
		/// <summary> Test helper: NUM_THREADS threads concurrently feed a pre-built
		/// 100-doc index into <c>mainWriter</c> through the various AddIndexes
		/// paths, while tracking the expected total doc count and any failures.
		/// </summary>
		private class AddDirectoriesThreads
		{
			private class AnonymousClassThread2:SupportClass.ThreadClass
			{
				public AnonymousClassThread2(int numIter, AddDirectoriesThreads enclosingInstance)
				{
					InitBlock(numIter, enclosingInstance);
				}
				private void  InitBlock(int numIter, AddDirectoriesThreads enclosingInstance)
				{
					this.numIter = numIter;
					this.enclosingInstance = enclosingInstance;
				}
				private int numIter;
				private AddDirectoriesThreads enclosingInstance;
				public AddDirectoriesThreads Enclosing_Instance
				{
					get
					{
						return enclosingInstance;
					}
					
				}
				/// <summary> Clones addDir numDirs times, then runs numIter DoBody
				/// iterations against those clones.
				/// </summary>
				override public void  Run()
				{
					try
					{
						Directory[] dirs = new Directory[Enclosing_Instance.numDirs];
						for (int k = 0; k < Enclosing_Instance.numDirs; k++)
							dirs[k] = new MockRAMDirectory(Enclosing_Instance.addDir);
						//int j = 0;
						//while (true) {
						// System.out.println(Thread.currentThread().getName() + ": iter
						// j=" + j);
						for (int x = 0; x < numIter; x++)
						{
							// only do addIndexesNoOptimize
							Enclosing_Instance.DoBody(x, dirs);
						}
						//if (numIter > 0 && j == numIter)
						//  break;
						//doBody(j++, dirs);
						//doBody(5, dirs);
						//}
					}
					catch (System.Exception t)
					{
						Enclosing_Instance.Handle(t);
					}
				}
			}
			private void  InitBlock(TestIndexWriterReader enclosingInstance)
			{
				this.enclosingInstance = enclosingInstance;
				threads = new SupportClass.ThreadClass[NUM_THREADS];
			}
			private TestIndexWriterReader enclosingInstance;
			public TestIndexWriterReader Enclosing_Instance
			{
				get
				{
					return enclosingInstance;
				}
				
			}
			internal Directory addDir; // source index each thread clones and adds
			internal const int NUM_THREADS = 5;
			internal const int NUM_INIT_DOCS = 100; // docs written into addDir
			internal int numDirs;
			internal SupportClass.ThreadClass[] threads;
			internal IndexWriter mainWriter;
			internal System.Collections.IList failures = new System.Collections.ArrayList();
			internal IndexReader[] readers; // readers over addDir, for AddIndexes(IndexReader[])
			internal bool didClose = false;
			internal HeavyAtomicInt count = new HeavyAtomicInt(0); // expected total docs added
			internal HeavyAtomicInt numAddIndexesNoOptimize = new HeavyAtomicInt(0);
			
			/// <summary> Builds the shared source index (addDir) with NUM_INIT_DOCS
			/// documents and opens numDirs readers over it.
			/// </summary>
			public AddDirectoriesThreads(TestIndexWriterReader enclosingInstance, int numDirs, IndexWriter mainWriter)
			{
				InitBlock(enclosingInstance);
				this.numDirs = numDirs;
				this.mainWriter = mainWriter;
				addDir = new MockRAMDirectory();
				IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
				// small buffer forces multiple segments in the source index
				writer.SetMaxBufferedDocs(2);
				for (int i = 0; i < NUM_INIT_DOCS; i++)
				{
					Document doc = Lucene.Net.Index.TestIndexWriterReader.CreateDocument(i, "addindex", 4);
					writer.AddDocument(doc);
				}
				
				writer.Close();
				
				readers = new IndexReader[numDirs];
				for (int i = 0; i < numDirs; i++)
					readers[i] = IndexReader.Open(addDir);
			}
			
			/// <summary>Waits for all worker threads to finish.</summary>
			internal virtual void  JoinThreads()
			{
				for (int i = 0; i < NUM_THREADS; i++)
					try
					{
						threads[i].Join();
					}
					catch (System.Threading.ThreadInterruptedException ie)
					{
						SupportClass.ThreadClass.Current().Interrupt();
					}
			}
			
			/// <summary>Closes the main writer; doWait is forwarded to IndexWriter.Close.</summary>
			internal virtual void  Close(bool doWait)
			{
				didClose = true;
				mainWriter.Close(doWait);
			}
			
			/// <summary>Closes the source readers and the source directory.</summary>
			internal virtual void  CloseDir()
			{
				for (int i = 0; i < numDirs; i++)
					readers[i].Close();
				addDir.Close();
			}
			
			/// <summary>Records a worker-thread exception for later assertion.</summary>
			internal virtual void  Handle(System.Exception t)
			{
				System.Console.Out.WriteLine(t.StackTrace);
				lock (failures.SyncRoot)
				{
					failures.Add(t);
				}
			}
			
			/// <summary>Creates and starts the worker threads.</summary>
			internal virtual void  LaunchThreads(int numIter)
			{
				for (int i = 0; i < NUM_THREADS; i++)
				{
					threads[i] = new AnonymousClassThread2(numIter, this);
				}
				for (int i = 0; i < NUM_THREADS; i++)
					threads[i].Start();
			}
			
			/// <summary> Cycles through the four add/commit operations based on the
			/// iteration number, then bumps the expected doc count by the number of
			/// documents just added (dirs.Length * NUM_INIT_DOCS).
			/// </summary>
			internal virtual void  DoBody(int j, Directory[] dirs)
			{
				switch (j % 4)
				{
					
					case 0: 
						mainWriter.AddIndexes(dirs);
						break;
					
					case 1: 
						mainWriter.AddIndexesNoOptimize(dirs);
						numAddIndexesNoOptimize.IncrementAndGet();
						break;
					
					case 2: 
						mainWriter.AddIndexes(readers);
						break;
					
					case 3: 
						mainWriter.Commit();
						break;
					}
				// case 3 adds no docs, yet the count is still bumped here; this
				// matches the upstream Java test, and the test only asserts
				// consistency against this same counter.
				count.AddAndGet(dirs.Length * NUM_INIT_DOCS);
			}
		}
+		
		/// <summary>Runs DoTestIndexWriterReopenSegment with optimize enabled.</summary>
		[Test]
		public virtual void  TestIndexWriterReopenSegmentOptimize()
		{
			DoTestIndexWriterReopenSegment(true);
		}
+		
		/// <summary>Runs DoTestIndexWriterReopenSegment without optimizing.</summary>
		[Test]
		public virtual void  TestIndexWriterReopenSegment()
		{
			DoTestIndexWriterReopenSegment(false);
		}
+		
		/// <summary> Tests that segments created and flushed by the writer are
		/// immediately visible through readers obtained via IndexWriter.GetReader,
		/// and that the changes reach the directory after the writer closes.
		/// </summary>
		public virtual void  DoTestIndexWriterReopenSegment(bool optimize)
		{
			Directory dir1 = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.SetInfoStream(infoStream);
			IndexReader r1 = writer.GetReader();
			Assert.AreEqual(0, r1.MaxDoc());
			CreateIndexNoClose(false, "index1", writer);
			writer.Flush(!optimize, true, true);
			
			// the 100 docs just flushed must be visible to a fresh NRT reader
			IndexReader iwr1 = writer.GetReader();
			Assert.AreEqual(100, iwr1.MaxDoc());
			
			IndexReader r2 = writer.GetReader();
			Assert.AreEqual(r2.MaxDoc(), 100);
			// add 100 documents
			for (int x = 10000; x < 10000 + 100; x++)
			{
				Document d = CreateDocument(x, "index1", 5);
				writer.AddDocument(d);
			}
			writer.Flush(false, true, true);
			// verify the reader was reopened internally
			IndexReader iwr2 = writer.GetReader();
			Assert.IsTrue(iwr2 != r1);
			Assert.AreEqual(200, iwr2.MaxDoc());
			// should have flushed out a segment
			IndexReader r3 = writer.GetReader();
			Assert.IsTrue(r2 != r3);
			Assert.AreEqual(200, r3.MaxDoc());
			
			// close all readers obtained from the writer (the upstream comment
			// mentioned dec-ref'ing, but these are plain Close() calls; closing an
			// NRT reader releases it back to the writer)
			r1.Close();
			iwr1.Close();
			r2.Close();
			r3.Close();
			iwr2.Close();
			writer.Close();
			
			// test whether the changes made it to the directory
			writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			IndexReader w2r1 = writer.GetReader();
			// insure the deletes were actually flushed to the directory
			Assert.AreEqual(200, w2r1.MaxDoc());
			w2r1.Close();
			writer.Close();
			
			dir1.Close();
		}
+		
+		
+		public static Document CreateDocument(int n, System.String indexName, int numFields)
+		{
+			System.Text.StringBuilder sb = new System.Text.StringBuilder();
+			Document doc = new Document();
+			doc.Add(new Field("id", System.Convert.ToString(n), Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("indexname", indexName, Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+			sb.Append("a");
+			sb.Append(n);
+			doc.Add(new Field("field1", sb.ToString(), Field.Store.YES, Field.Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+			sb.Append(" b");
+			sb.Append(n);
+			for (int i = 1; i < numFields; i++)
+			{
+				doc.Add(new Field("field" + (i + 1), sb.ToString(), Field.Store.YES, Field.Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+			}
+			return doc;
+		}
+		
+		/// <summary> Delete a document by term and return the doc id
+		/// 
+		/// </summary>
+		/// <returns>
+		/// 
+		/// public static int deleteDocument(Term term, IndexWriter writer) throws
+		/// IOException { IndexReader reader = writer.getReader(); TermDocs td =
+		/// reader.termDocs(term); int doc = -1; //if (td.next()) { // doc = td.doc();
+		/// //} //writer.deleteDocuments(term); td.close(); return doc; }
+		/// </returns>
+		public static void  CreateIndex(Directory dir1, System.String indexName, bool multiSegment)
+		{
+			IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			w.SetMergePolicy(new LogDocMergePolicy(w));
+			for (int i = 0; i < 100; i++)
+			{
+				w.AddDocument(CreateDocument(i, indexName, 4));
+				if (multiSegment && (i % 10) == 0)
+				{
+				}
+			}
+			if (!multiSegment)
+			{
+				w.Optimize();
+			}
+			w.Close();
+		}
+		
+		public static void  CreateIndexNoClose(bool multiSegment, System.String indexName, IndexWriter w)
+		{
+			for (int i = 0; i < 100; i++)
+			{
+				w.AddDocument(CreateDocument(i, indexName, 4));
+			}
+			if (!multiSegment)
+			{
+				w.Optimize();
+			}
+		}
+		
+		private class MyWarmer:IndexWriter.IndexReaderWarmer
+		{
+			internal int warmCount;
+			public override void  Warm(IndexReader reader)
+			{
+				warmCount++;
+			}
+		}
+		
		/// <summary> Verifies that a merged-segment warmer registered on the writer
		/// is invoked when merges happen while the writer is in near-real-time mode.
		/// </summary>
		[Test]
		public virtual void  TestMergeWarmer()
		{
			
			Directory dir1 = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.SetInfoStream(infoStream);
			
			// create the index
			CreateIndexNoClose(false, "test", writer);
			
			// get a reader to put writer into near real-time mode
			IndexReader r1 = writer.GetReader();
			
			// Enroll warmer
			MyWarmer warmer = new MyWarmer();
			writer.SetMergedSegmentWarmer(warmer);
			// aggressive settings so merges actually happen for the few docs below
			writer.SetMergeFactor(2);
			writer.SetMaxBufferedDocs(2);
			
			for (int i = 0; i < 10; i++)
			{
				writer.AddDocument(CreateDocument(i, "test", 4));
			}
			// wait for background merges so warmCount is stable before asserting
			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).Sync();
			
			Assert.IsTrue(warmer.warmCount > 0);
			int count = warmer.warmCount;
			
			// an optimize forces at least one more merge, hence more warming
			writer.AddDocument(CreateDocument(17, "test", 4));
			writer.Optimize();
			Assert.IsTrue(warmer.warmCount > count);
			
			writer.Close();
			r1.Close();
			dir1.Close();
		}
+		
		/// <summary> Verifies that an NRT reader stays valid across a writer commit,
		/// and that a Reopen after further adds sees the new documents.
		/// </summary>
		[Test]
		public virtual void  TestAfterCommit()
		{
			Directory dir1 = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.SetInfoStream(infoStream);
			
			// create the index
			CreateIndexNoClose(false, "test", writer);
			
			// get a reader to put writer into near real-time mode
			IndexReader r1 = writer.GetReader();
			_TestUtil.CheckIndex(dir1);
			writer.Commit();
			_TestUtil.CheckIndex(dir1);
			// the reader opened before the commit still sees its 100 docs
			Assert.AreEqual(100, r1.NumDocs());
			
			for (int i = 0; i < 10; i++)
			{
				writer.AddDocument(CreateDocument(i, "test", 4));
			}
			// wait for any background merges before reopening
			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).Sync();
			
			IndexReader r2 = r1.Reopen();
			if (r2 != r1)
			{
				// Reopen returned a new instance; release the old one
				r1.Close();
				r1 = r2;
			}
			Assert.AreEqual(110, r1.NumDocs());
			writer.Close();
			r1.Close();
			dir1.Close();
		}
+		
		// Make sure reader remains usable even if IndexWriter closes
		/// <summary> Verifies that an NRT reader can still be searched after its
		/// originating IndexWriter is closed, but that Reopen then fails with
		/// AlreadyClosedException.
		/// </summary>
		[Test]
		public virtual void  TestAfterClose()
		{
			Directory dir1 = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.SetInfoStream(infoStream);
			
			// create the index
			CreateIndexNoClose(false, "test", writer);
			
			IndexReader r = writer.GetReader();
			writer.Close();
			
			_TestUtil.CheckIndex(dir1);
			
			// reader should remain usable even after IndexWriter is closed:
			Assert.AreEqual(100, r.NumDocs());
			Query q = new TermQuery(new Term("indexname", "test"));
			Assert.AreEqual(100, new IndexSearcher(r).Search(q, 10).totalHits);
			
			// Reopen requires the writer, which is gone, so it must throw
			try
			{
				r.Reopen();
				Assert.Fail("failed to hit AlreadyClosedException");
			}
			catch (AlreadyClosedException ace)
			{
				// expected
			}
			r.Close();
			dir1.Close();
		}
+		
		// Stress test reopen during addIndexes
		/// <summary> Repeatedly reopens an NRT reader while background threads add
		/// indexes, asserting the visible doc count never decreases.
		/// </summary>
		[Test]
		public virtual void  TestDuringAddIndexes()
		{
			Directory dir1 = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.SetInfoStream(infoStream);
			writer.SetMergeFactor(2);
			
			// create the index
			CreateIndexNoClose(false, "test", writer);
			writer.Commit();
			
			Directory[] dirs = new Directory[10];
			for (int i = 0; i < 10; i++)
			{
				dirs[i] = new MockRAMDirectory(dir1);
			}
			
			IndexReader r = writer.GetReader();
			
			int NUM_THREAD = 5;
			float SECONDS = 3;
			
			// NOTE(review): DateTime.Now.Millisecond is the 0-999 millisecond
			// component of the wall clock, not a monotonically increasing
			// timestamp (the Java original used System.currentTimeMillis()).
			// endTime is therefore >= 3000 while Millisecond never exceeds 999, so
			// the reopen loop below cannot terminate via this condition. Any fix
			// must also update the matching clock reads inside
			// AnonymousClassThread, which receives this same endTime.
			long endTime = (long) (System.DateTime.Now.Millisecond + 1000.0 * SECONDS);
			System.Collections.IList excs = (System.Collections.IList) System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(new System.Collections.ArrayList()));
			
			SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[NUM_THREAD];
			for (int i = 0; i < NUM_THREAD; i++)
			{
				threads[i] = new AnonymousClassThread(endTime, writer, dirs, excs, this);
				threads[i].IsBackground = true;
				threads[i].Start();
			}
			
			int lastCount = 0;
			while (System.DateTime.Now.Millisecond < endTime) // see NOTE(review) above
			{
				IndexReader r2 = r.Reopen();
				if (r2 != r)
				{
					r.Close();
					r = r2;
				}
				Query q = new TermQuery(new Term("indexname", "test"));
				int count = new IndexSearcher(r).Search(q, 10).totalHits;
				// doc count must never go backwards while indexes are only added
				Assert.IsTrue(count >= lastCount);
				lastCount = count;
			}
			
			for (int i = 0; i < NUM_THREAD; i++)
			{
				threads[i].Join();
			}
			
			Assert.AreEqual(0, excs.Count);
			writer.Close();
			
			_TestUtil.CheckIndex(dir1);
			r.Close();
			dir1.Close();
		}
+		
		// Stress test reopen during add/delete
		/// <summary> Repeatedly reopens an NRT reader while background threads add
		/// and delete documents, asserting no thread records an exception.
		/// </summary>
		[Test]
		public virtual void  TestDuringAddDelete()
		{
			Directory dir1 = new MockRAMDirectory();
			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
			writer.SetInfoStream(infoStream);
			writer.SetMergeFactor(2);
			
			// create the index
			CreateIndexNoClose(false, "test", writer);
			writer.Commit();
			
			IndexReader r = writer.GetReader();
			
			int NUM_THREAD = 5;
			float SECONDS = 3;
			
			// NOTE(review): same clock bug as TestDuringAddIndexes --
			// DateTime.Now.Millisecond is only the 0-999 millisecond component, so
			// this endTime does not represent SECONDS of wall time and the loop
			// condition below (max 999 < endTime >= 3000) cannot become false. A
			// fix must be coordinated with AnonymousClassThread1, which shares
			// this endTime value.
			long endTime = (long) (System.DateTime.Now.Millisecond + 1000.0 * SECONDS);
			System.Collections.IList excs = (System.Collections.IList) System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(new System.Collections.ArrayList()));
			
			SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[NUM_THREAD];
			for (int i = 0; i < NUM_THREAD; i++)
			{
				threads[i] = new AnonymousClassThread1(endTime, writer, excs, this);
				threads[i].IsBackground = true;
				threads[i].Start();
			}
			
			int sum = 0;
			while (System.DateTime.Now.Millisecond < endTime) // see NOTE(review) above
			{
				IndexReader r2 = r.Reopen();
				if (r2 != r)
				{
					r.Close();
					r = r2;
				}
				Query q = new TermQuery(new Term("indexname", "test"));
				sum += new IndexSearcher(r).Search(q, 10).totalHits;
			}
			
			for (int i = 0; i < NUM_THREAD; i++)
			{
				threads[i].Join();
			}
			// at least one search must have matched something
			Assert.IsTrue(sum > 0);
			
			Assert.AreEqual(0, excs.Count);
			writer.Close();
			
			_TestUtil.CheckIndex(dir1);
			r.Close();
			dir1.Close();
		}
+		
+		[Test]
+		public virtual void  TestExpungeDeletes()
+		{
+			Directory dir = new MockRAMDirectory();
+			IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			Document doc = new Document();
+			doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
+			Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+			doc.Add(id);
+			id.SetValue("0");
+			w.AddDocument(doc);
+			id.SetValue("1");
+			w.AddDocument(doc);
+			w.DeleteDocuments(new Term("id", "0"));
+			
+			IndexReader r = w.GetReader();
+			w.ExpungeDeletes();
+			w.Close();
+			r.Close();
+			r = IndexReader.Open(dir);
+			Assert.AreEqual(1, r.NumDocs());
+			Assert.IsFalse(r.HasDeletions());
+			r.Close();
+			dir.Close();
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyBug.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestLazyBug.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyBug.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestLazyBug.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,12 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Lucene.Net.Documents;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 
 namespace Lucene.Net.Index
 {
@@ -33,8 +33,8 @@
 	/// <summary> Test demonstrating EOF bug on the last field of the last doc 
 	/// if other docs have allready been accessed.
 	/// </summary>
-	[TestFixture]
-	public class TestLazyBug : LuceneTestCase
+    [TestFixture]
+	public class TestLazyBug:LuceneTestCase
 	{
 		[Serializable]
 		public class AnonymousClassFieldSelector : FieldSelector
@@ -49,8 +49,6 @@
 			}
 		}
 		
-		public static int BASE_SEED = 13;
-		
 		public static int NUM_DOCS = 500;
 		public static int NUM_FIELDS = 100;
 		
@@ -62,20 +60,20 @@
 		
 		private static FieldSelector SELECTOR;
 		
-		private static Directory MakeIndex()
+		private Directory MakeIndex()
 		{
 			Directory dir = new RAMDirectory();
 			try
 			{
-				System.Random r = new System.Random((System.Int32) (BASE_SEED + 42));
+				System.Random r = NewRandom();
 				Analyzer analyzer = new SimpleAnalyzer();
-                IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+				IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 				
 				writer.SetUseCompoundFile(false);
 				
 				for (int d = 1; d <= NUM_DOCS; d++)
 				{
-					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+					Document doc = new Document();
 					for (int f = 1; f <= NUM_FIELDS; f++)
 					{
 						doc.Add(new Field("f" + f, data[f % data.Length] + '#' + data[r.Next(data.Length)], Field.Store.YES, Field.Index.ANALYZED));
@@ -91,22 +89,18 @@
 			return dir;
 		}
 		
-		public static void  DoTest(int[] docs)
+		public virtual void  DoTest(int[] docs)
 		{
-			if (dataset.Count == 0)
-			{
-				for (int i = 0; i < data.Length; i++)
-				{
-					dataset.Add(data[i], data[i]);
-				}
-			}
+            if (dataset.Count == 0)
+                for (int i = 0; i < data.Length; i++)
+                    dataset.Add(data[i], data[i]);
 
 			Directory dir = MakeIndex();
 			IndexReader reader = IndexReader.Open(dir);
 			for (int i = 0; i < docs.Length; i++)
 			{
-				Lucene.Net.Documents.Document d = reader.Document(docs[i], SELECTOR);
-				System.String trash = d.Get(MAGIC_FIELD);
+				Document d = reader.Document(docs[i], SELECTOR);
+				d.Get(MAGIC_FIELD);
 				
 				System.Collections.IList fields = d.GetFields();
 				for (System.Collections.IEnumerator fi = fields.GetEnumerator(); fi.MoveNext(); )
@@ -117,7 +111,7 @@
 						f = (Fieldable) fi.Current;
 						System.String fname = f.Name();
 						System.String fval = f.StringValue();
-						Assert.IsNotNull(fval, docs[i] + " FIELD: " + fname);
+						Assert.IsNotNull(docs[i] + " FIELD: " + fname, fval);
 						System.String[] vals = fval.Split('#');
 						if (!dataset.Contains(vals[0]) || !dataset.Contains(vals[1]))
 						{
@@ -150,7 +144,6 @@
 		{
 			DoTest(new int[]{150, 399});
 		}
-
 		static TestLazyBug()
 		{
 			SELECTOR = new AnonymousClassFieldSelector();



Mime
View raw message