lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From d...@apache.org
Subject [Lucene.Net] svn commit: r1103259 - in /incubator/lucene.net/branches/Lucene.Net_2_9_4g: src/core/QueryParser/ src/core/Support/ test/core/Index/ test/core/Search/
Date Sun, 15 May 2011 00:45:28 GMT
Author: digy
Date: Sun May 15 00:45:27 2011
New Revision: 1103259

URL: http://svn.apache.org/viewvc?rev=1103259&view=rev
Log:
[LUCENENET-412] Code clean up.

Modified:
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/QueryParser/QueryParser.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/CollectionsHelper.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/EquatableList.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestDoc.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriter.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriterDelete.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestStressIndexing2.cs
    incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Search/CheckHits.cs

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/QueryParser/QueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/QueryParser/QueryParser.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/QueryParser/QueryParser.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/QueryParser/QueryParser.cs Sun May 15 00:45:27 2011
@@ -2218,8 +2218,8 @@ label_1_brk: ;  // {{Aroush-2.9}} this l
 			else
 				return (jj_ntk = jj_nt.kind);
 		}
-		
-		private System.Collections.IList jj_expentries = new System.Collections.ArrayList();
+
+        private List<int[]> jj_expentries = new List<int[]>();
 		private int[] jj_expentry;
 		private int jj_kind = - 1;
 		private int[] jj_lasttokens = new int[100];
@@ -2287,7 +2287,7 @@ label_1_brk: ;  // {{Aroush-2.9}} this l
 			int[][] exptokseq = new int[jj_expentries.Count][];
 			for (int i = 0; i < jj_expentries.Count; i++)
 			{
-				exptokseq[i] = (int[]) jj_expentries[i];
+				exptokseq[i] = jj_expentries[i];
 			}
 			return new ParseException(token, exptokseq, Lucene.Net.QueryParsers.QueryParserConstants.tokenImage);
 		}

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/CollectionsHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/CollectionsHelper.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/CollectionsHelper.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/CollectionsHelper.cs Sun May 15 00:45:27 2011
@@ -38,39 +38,6 @@ namespace Lucene.Net.Support
             }
         }
 
-        public static void AddIfNotContains(System.Collections.ArrayList hashtable, System.Object item)
-        {
-            if (hashtable.Contains(item) == false)
-            {
-                hashtable.Add(item);
-            }
-        }
-
-        public static void AddAll(System.Collections.Hashtable hashtable, System.Collections.ICollection items)
-        {
-            System.Collections.IEnumerator iter = items.GetEnumerator();
-            System.Object item;
-            while (iter.MoveNext())
-            {
-                item = iter.Current;
-                hashtable.Add(item, item);
-            }
-        }
-
-        public static void AddAllIfNotContains(System.Collections.Hashtable hashtable, System.Collections.ICollection items)
-        {
-            System.Collections.IEnumerator iter = items.GetEnumerator();
-            System.Object item;
-            while (iter.MoveNext())
-            {
-                item = iter.Current;
-                if (hashtable.Contains(item) == false)
-                {
-                    hashtable.Add(item, item);
-                }
-            }
-        }
-
         public static void AddAllIfNotContains(System.Collections.Generic.IDictionary<string,string> hashtable, System.Collections.Generic.ICollection<string> items)
         {
             foreach (string s in items)
@@ -81,15 +48,7 @@ namespace Lucene.Net.Support
                 }
             }
         }
-
-        public static void AddAll(System.Collections.Generic.IDictionary<string, string> hashtable, System.Collections.Generic.ICollection<string> items)
-        {
-            foreach (string s in items)
-            {
-                hashtable.Add(s, s);
-            }
-        }
-
+        
         public static System.String CollectionToString<T>(System.Collections.Generic.IList<T> c)
         {
             System.Text.StringBuilder sb = new System.Text.StringBuilder();
@@ -171,67 +130,13 @@ namespace Lucene.Net.Support
             return s.ToString();
         }
 
-        /// <summary>
-        /// Compares two string arrays for equality.
-        /// </summary>
-        /// <param name="l1">First string array list to compare</param>
-        /// <param name="l2">Second string array list to compare</param>
-        /// <returns>true if the strings are equal in both arrays, false otherwise</returns>
-        public static bool CompareStringArrays(System.String[] l1, System.String[] l2)
-        {
-            if (l1.Length != l2.Length)
-                return false;
-            for (int i = 0; i < l1.Length; i++)
-            {
-                if (l1[i] != l2[i])
-                    return false;
-            }
-            return true;
-        }
-
-        
-
         public static void Sort<T1>(System.Collections.Generic.IList<T1> list, System.Collections.Generic.IComparer<T1> Comparator)
         {
             if (list.IsReadOnly) throw new System.NotSupportedException();
             if (Comparator == null) ((System.Collections.Generic.List<T1>)list).Sort();
             else ((System.Collections.Generic.List<T1>)list).Sort(Comparator);
         }
-
-        /// <summary>
-        /// Fills the array with an specific value from an specific index to an specific index.
-        /// </summary>
-        /// <param name="array">The array to be filled.</param>
-        /// <param name="fromindex">The first index to be filled.</param>
-        /// <param name="toindex">The last index to be filled.</param>
-        /// <param name="val">The value to fill the array with.</param>
-        public static void Fill(System.Array array, System.Int32 fromindex, System.Int32 toindex, System.Object val)
-        {
-            System.Object Temp_Object = val;
-            System.Type elementtype = array.GetType().GetElementType();
-            if (elementtype != val.GetType())
-                Temp_Object = System.Convert.ChangeType(val, elementtype);
-            if (array.Length == 0)
-                throw (new System.NullReferenceException());
-            if (fromindex > toindex)
-                throw (new System.ArgumentException());
-            if ((fromindex < 0) || ((System.Array)array).Length < toindex)
-                throw (new System.IndexOutOfRangeException());
-            for (int index = (fromindex > 0) ? fromindex-- : fromindex; index < toindex; index++)
-                array.SetValue(Temp_Object, index);
-        }
-
-
-        /// <summary>
-        /// Fills the array with an specific value.
-        /// </summary>
-        /// <param name="array">The array to be filled.</param>
-        /// <param name="val">The value to fill the array with.</param>
-        public static void Fill(System.Array array, System.Object val)
-        {
-            Fill(array, 0, array.Length, val);
-        }
-
+                
         /// <summary>
         /// Compares the entire members of one array with the other one.
         /// </summary>

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/EquatableList.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/EquatableList.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/EquatableList.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/src/core/Support/EquatableList.cs Sun May 15 00:45:27 2011
@@ -330,5 +330,16 @@ namespace Lucene.Net.Support
         }
 
         #endregion
+
+        public override string ToString()
+        {
+            System.Text.StringBuilder sb = new System.Text.StringBuilder();
+            for (int i = 0; i < Count; i++)
+            {
+                sb.Append(this[i].ToString());
+                if (i != Count - 1) sb.Append(" , ");
+            }
+            return sb.ToString();
+        }
     }
 }
\ No newline at end of file

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestDoc.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestDoc.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestDoc.cs Sun May 15 00:45:27 2011
@@ -16,6 +16,7 @@
  */
 
 using System;
+using System.Collections.Generic;
 
 using NUnit.Framework;
 
@@ -49,7 +50,7 @@ namespace Lucene.Net.Index
 		
 		private System.IO.FileInfo workDir;
 		private System.IO.FileInfo indexDir;
-		private System.Collections.ArrayList files;
+		private List<System.IO.FileInfo> files;
 		
 		
 		/// <summary>Set the test case. This test case needs
@@ -67,8 +68,8 @@ namespace Lucene.Net.Index
 			
 			Directory directory = FSDirectory.Open(indexDir);
 			directory.Close();
-			
-			files = new System.Collections.ArrayList();
+
+            files = new List<System.IO.FileInfo>();
 			files.Add(CreateOutput("test.txt", "This is the first test file"));
 			
 			files.Add(CreateOutput("test2.txt", "This is the second test file"));

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriter.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriter.cs Sun May 15 00:45:27 2011
@@ -1010,7 +1010,7 @@ namespace Lucene.Net.Index
             IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 
             char[] chars = new char[DocumentsWriter.CHAR_BLOCK_SIZE_ForNUnit - 1];
-            Support.CollectionsHelper.Fill(chars, 'x');
+            for (int i = 0; i < chars.Length; i++) chars[i] = 'x';
             Document doc = new Document();
             System.String bigTerm = new System.String(chars);
 

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriterDelete.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriterDelete.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriterDelete.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestIndexWriterDelete.cs Sun May 15 00:45:27 2011
@@ -31,918 +31,904 @@ using LuceneTestCase = Lucene.Net.Util.L
 
 namespace Lucene.Net.Index
 {
-	
+
     [TestFixture]
-	public class TestIndexWriterDelete:LuceneTestCase
-	{
-		private class AnonymousClassFailure:MockRAMDirectory.Failure
-		{
-			public AnonymousClassFailure(TestIndexWriterDelete enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
-			private void  InitBlock(TestIndexWriterDelete enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private TestIndexWriterDelete enclosingInstance;
-			public TestIndexWriterDelete Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			internal bool sawMaybe = false;
-			internal bool failed = false;
-			public override MockRAMDirectory.Failure Reset()
-			{
-				sawMaybe = false;
-				failed = false;
-				return this;
-			}
-			public override void  Eval(MockRAMDirectory dir)
-			{
-				if (sawMaybe && !failed)
-				{
-					bool seen = false;
-					System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
-					for (int i = 0; i < trace.FrameCount; i++)
-					{
-						System.Diagnostics.StackFrame sf = trace.GetFrame(i);
-						if ("ApplyDeletes".Equals(sf.GetMethod().Name))
-						{
-							seen = true;
-							break;
-						}
-					}
-					if (!seen)
-					{
-						// Only fail once we are no longer in applyDeletes
-						failed = true;
-						throw new System.IO.IOException("fail after applyDeletes");
-					}
-				}
-				if (!failed)
-				{
-					System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
-					for (int i = 0; i < trace.FrameCount; i++)
-					{
-						System.Diagnostics.StackFrame sf = trace.GetFrame(i);
-						if ("ApplyDeletes".Equals(sf.GetMethod().Name))
-						{
-							sawMaybe = true;
-							break;
-						}
-					}
-				}
-			}
-		}
-		private class AnonymousClassFailure1:MockRAMDirectory.Failure
-		{
-			public AnonymousClassFailure1(TestIndexWriterDelete enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
-			private void  InitBlock(TestIndexWriterDelete enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private TestIndexWriterDelete enclosingInstance;
-			public TestIndexWriterDelete Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			internal bool failed = false;
-			public override MockRAMDirectory.Failure Reset()
-			{
-				failed = false;
-				return this;
-			}
-			public override void  Eval(MockRAMDirectory dir)
-			{
-				if (!failed)
-				{
-					failed = true;
-					throw new System.IO.IOException("fail in add doc");
-				}
-			}
-		}
-		
-		// test the simple case
-		[Test]
-		public virtual void  TestSimpleCase()
-		{
-			System.String[] keywords = new System.String[]{"1", "2"};
-			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
-			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
-			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetUseCompoundFile(true);
-				modifier.SetMaxBufferedDeleteTerms(1);
-				
-				for (int i = 0; i < keywords.Length; i++)
-				{
-					Document doc = new Document();
-					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
-					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
-					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
-					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
-					modifier.AddDocument(doc);
-				}
-				modifier.Optimize();
-				modifier.Commit();
-				
-				Term term = new Term("city", "Amsterdam");
-				int hitCount = GetHitCount(dir, term);
-				Assert.AreEqual(1, hitCount);
-				modifier.DeleteDocuments(term);
-				modifier.Commit();
-				hitCount = GetHitCount(dir, term);
-				Assert.AreEqual(0, hitCount);
-				
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		// test when delete terms only apply to disk segments
-		[Test]
-		public virtual void  TestNonRAMDelete()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(2);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 7; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
-				Assert.IsTrue(0 < modifier.GetSegmentCount());
-				
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-				
-				modifier.Commit();
-				
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(0, reader.NumDocs());
-				reader.Close();
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		[Test]
-		public virtual void  TestMaxBufferedDeletes()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				Directory dir = new MockRAMDirectory();
-				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				writer.SetMaxBufferedDeleteTerms(1);
-				writer.DeleteDocuments(new Term("foobar", "1"));
-				writer.DeleteDocuments(new Term("foobar", "1"));
-				writer.DeleteDocuments(new Term("foobar", "1"));
-				Assert.AreEqual(3, writer.GetFlushDeletesCount());
-				writer.Close();
-				dir.Close();
-			}
-		}
-		
-		// test when delete terms only apply to ram segments
-		[Test]
-		public virtual void  TestRAMDeletes()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				for (int t = 0; t < 2; t++)
-				{
-					bool autoCommit = (0 == pass);
-					Directory dir = new MockRAMDirectory();
-					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-					modifier.SetMaxBufferedDocs(4);
-					modifier.SetMaxBufferedDeleteTerms(4);
-					
-					int id = 0;
-					int value_Renamed = 100;
-					
-					AddDoc(modifier, ++id, value_Renamed);
-					if (0 == t)
-						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-					else
-						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
-					AddDoc(modifier, ++id, value_Renamed);
-					if (0 == t)
-					{
-						modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-						Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
-						Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
-					}
-					else
-						modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
-					
-					AddDoc(modifier, ++id, value_Renamed);
-					Assert.AreEqual(0, modifier.GetSegmentCount());
-					modifier.Flush();
-					
-					modifier.Commit();
-					
-					IndexReader reader = IndexReader.Open(dir);
-					Assert.AreEqual(1, reader.NumDocs());
-					
-					int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
-					Assert.AreEqual(1, hitCount);
-					reader.Close();
-					modifier.Close();
-					dir.Close();
-				}
-			}
-		}
-		
-		// test when delete terms apply to both disk and ram segments
-		[Test]
-		public virtual void  TestBothDeletes()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(100);
-				modifier.SetMaxBufferedDeleteTerms(100);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 5; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				
-				value_Renamed = 200;
-				for (int i = 0; i < 5; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				for (int i = 0; i < 5; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
-				
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(5, reader.NumDocs());
-				modifier.Close();
-			}
-		}
-		
-		// test that batched delete terms are flushed together
-		[Test]
-		public virtual void  TestBatchDeletes()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(2);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 7; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				id = 0;
-				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
-				modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
-				
-				modifier.Commit();
-				
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(5, reader.NumDocs());
-				reader.Close();
-				
-				Term[] terms = new Term[3];
-				for (int i = 0; i < terms.Length; i++)
-				{
-					terms[i] = new Term("id", System.Convert.ToString(++id));
-				}
-				modifier.DeleteDocuments(terms);
-				modifier.Commit();
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(2, reader.NumDocs());
-				reader.Close();
-				
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		// test deleteAll()
-		[Test]
-		public virtual void  TestDeleteAll()
-		{
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				Directory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetMaxBufferedDocs(2);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				int id = 0;
-				int value_Renamed = 100;
-				
-				for (int i = 0; i < 7; i++)
-				{
-					AddDoc(modifier, ++id, value_Renamed);
-				}
-				modifier.Commit();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				// Add 1 doc (so we will have something buffered)
-				AddDoc(modifier, 99, value_Renamed);
-				
-				// Delete all
-				modifier.DeleteAll();
-				
-				// Delete all shouldn't be on disk yet
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(7, reader.NumDocs());
-				reader.Close();
-				
-				// Add a doc and update a doc (after the deleteAll, before the commit)
-				AddDoc(modifier, 101, value_Renamed);
-				UpdateDoc(modifier, 102, value_Renamed);
-				
-				// commit the delete all
-				modifier.Commit();
-				
-				// Validate there are no docs left
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(2, reader.NumDocs());
-				reader.Close();
-				
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		// test rollback of deleteAll()
-		[Test]
-		public virtual void  TestDeleteAllRollback()
-		{
-			Directory dir = new MockRAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
-			modifier.SetMaxBufferedDocs(2);
-			modifier.SetMaxBufferedDeleteTerms(2);
-			
-			int id = 0;
-			int value_Renamed = 100;
-			
-			for (int i = 0; i < 7; i++)
-			{
-				AddDoc(modifier, ++id, value_Renamed);
-			}
-			modifier.Commit();
-			
-			AddDoc(modifier, ++id, value_Renamed);
-			
-			IndexReader reader = IndexReader.Open(dir);
-			Assert.AreEqual(7, reader.NumDocs());
-			reader.Close();
-			
-			// Delete all
-			modifier.DeleteAll();
-			
-			// Roll it back
-			modifier.Rollback();
-			modifier.Close();
-			
-			// Validate that the docs are still there
-			reader = IndexReader.Open(dir);
-			Assert.AreEqual(7, reader.NumDocs());
-			reader.Close();
-			
-			dir.Close();
-		}
-		
-		
-		// test deleteAll() w/ near real-time reader
-		[Test]
-		public virtual void  TestDeleteAllNRT()
-		{
-			Directory dir = new MockRAMDirectory();
-			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
-			modifier.SetMaxBufferedDocs(2);
-			modifier.SetMaxBufferedDeleteTerms(2);
-			
-			int id = 0;
-			int value_Renamed = 100;
-			
-			for (int i = 0; i < 7; i++)
-			{
-				AddDoc(modifier, ++id, value_Renamed);
-			}
-			modifier.Commit();
-			
-			IndexReader reader = modifier.GetReader();
-			Assert.AreEqual(7, reader.NumDocs());
-			reader.Close();
-			
-			AddDoc(modifier, ++id, value_Renamed);
-			AddDoc(modifier, ++id, value_Renamed);
-			
-			// Delete all
-			modifier.DeleteAll();
-			
-			reader = modifier.GetReader();
-			Assert.AreEqual(0, reader.NumDocs());
-			reader.Close();
-			
-			
-			// Roll it back
-			modifier.Rollback();
-			modifier.Close();
-			
-			// Validate that the docs are still there
-			reader = IndexReader.Open(dir);
-			Assert.AreEqual(7, reader.NumDocs());
-			reader.Close();
-			
-			dir.Close();
-		}
-		
-		
-		private void  UpdateDoc(IndexWriter modifier, int id, int value_Renamed)
-		{
-			Document doc = new Document();
-			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
-			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
-			doc.Add(new Field("value", System.Convert.ToString(value_Renamed), Field.Store.NO, Field.Index.NOT_ANALYZED));
-			modifier.UpdateDocument(new Term("id", System.Convert.ToString(id)), doc);
-		}
-		
-		
-		private void  AddDoc(IndexWriter modifier, int id, int value_Renamed)
-		{
-			Document doc = new Document();
-			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
-			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
-			doc.Add(new Field("value", System.Convert.ToString(value_Renamed), Field.Store.NO, Field.Index.NOT_ANALYZED));
-			modifier.AddDocument(doc);
-		}
-		
-		private int GetHitCount(Directory dir, Term term)
-		{
-			IndexSearcher searcher = new IndexSearcher(dir);
-			int hitCount = searcher.Search(new TermQuery(term), null, 1000).TotalHits;
-			searcher.Close();
-			return hitCount;
-		}
-		
-		[Test]
-		public virtual void  TestDeletesOnDiskFull()
-		{
-			TestOperationsOnDiskFull(false);
-		}
-		
-		[Test]
-		public virtual void  TestUpdatesOnDiskFull()
-		{
-			TestOperationsOnDiskFull(true);
-		}
-		
-		/// <summary> Make sure if modifier tries to commit but hits disk full that modifier
-		/// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
-		/// </summary>
-		private void  TestOperationsOnDiskFull(bool updates)
-		{
-			
-			bool debug = false;
-			Term searchTerm = new Term("content", "aaa");
-			int START_COUNT = 157;
-			int END_COUNT = 144;
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				
-				// First build up a starting index:
-				MockRAMDirectory startDir = new MockRAMDirectory();
-				IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
-				for (int i = 0; i < 157; i++)
-				{
-					Document d = new Document();
-					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
-					d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
-					writer.AddDocument(d);
-				}
-				writer.Close();
-				
-				long diskUsage = startDir.SizeInBytes();
-				long diskFree = diskUsage + 10;
-				
-				System.IO.IOException err = null;
-				
-				bool done = false;
-				
-				// Iterate w/ ever increasing free disk space:
-				while (!done)
-				{
-					MockRAMDirectory dir = new MockRAMDirectory(startDir);
-					dir.SetPreventDoubleWrite(false);
-					IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
-					
-					modifier.SetMaxBufferedDocs(1000); // use flush or close
-					modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
-					
-					// For each disk size, first try to commit against
-					// dir that will hit random IOExceptions & disk
-					// full; after, give it infinite disk space & turn
-					// off random IOExceptions & retry w/ same reader:
-					bool success = false;
-					
-					for (int x = 0; x < 2; x++)
-					{
-						
-						double rate = 0.1;
-						double diskRatio = ((double) diskFree) / diskUsage;
-						long thisDiskFree;
-						System.String testName;
-						
-						if (0 == x)
-						{
-							thisDiskFree = diskFree;
-							if (diskRatio >= 2.0)
-							{
-								rate /= 2;
-							}
-							if (diskRatio >= 4.0)
-							{
-								rate /= 2;
-							}
-							if (diskRatio >= 6.0)
-							{
-								rate = 0.0;
-							}
-							if (debug)
-							{
-								System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
-							}
-							testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
-						}
-						else
-						{
-							thisDiskFree = 0;
-							rate = 0.0;
-							if (debug)
-							{
-								System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
-							}
-							testName = "reader re-use after disk full";
-						}
-						
-						dir.SetMaxSizeInBytes(thisDiskFree);
-						dir.SetRandomIOExceptionRate(rate, diskFree);
-						
-						try
-						{
-							if (0 == x)
-							{
-								int docId = 12;
-								for (int i = 0; i < 13; i++)
-								{
-									if (updates)
-									{
-										Document d = new Document();
-										d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
-										d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
-										modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
-									}
-									else
-									{
-										// deletes
-										modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
-										// modifier.setNorm(docId, "contents", (float)2.0);
-									}
-									docId += 12;
-								}
-							}
-							modifier.Close();
-							success = true;
-							if (0 == x)
-							{
-								done = true;
-							}
-						}
-						catch (System.IO.IOException e)
-						{
-							if (debug)
-							{
-								System.Console.Out.WriteLine("  hit IOException: " + e);
-								System.Console.Out.WriteLine(e.StackTrace);
-							}
-							err = e;
-							if (1 == x)
-							{
-								System.Console.Error.WriteLine(e.StackTrace);
-								Assert.Fail(testName + " hit IOException after disk space was freed up");
-							}
-						}
-						
-						// If the close() succeeded, make sure there are
-						// no unreferenced files.
+    public class TestIndexWriterDelete : LuceneTestCase
+    {
+        private class AnonymousClassFailure : MockRAMDirectory.Failure
+        {
+            public AnonymousClassFailure(TestIndexWriterDelete enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+            }
+            private void InitBlock(TestIndexWriterDelete enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private TestIndexWriterDelete enclosingInstance;
+            public TestIndexWriterDelete Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+
+            }
+            internal bool sawMaybe = false;
+            internal bool failed = false;
+            public override MockRAMDirectory.Failure Reset()
+            {
+                sawMaybe = false;
+                failed = false;
+                return this;
+            }
+            public override void Eval(MockRAMDirectory dir)
+            {
+                if (sawMaybe && !failed)
+                {
+                    bool seen = false;
+                    System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
+                    for (int i = 0; i < trace.FrameCount; i++)
+                    {
+                        System.Diagnostics.StackFrame sf = trace.GetFrame(i);
+                        if ("ApplyDeletes".Equals(sf.GetMethod().Name))
+                        {
+                            seen = true;
+                            break;
+                        }
+                    }
+                    if (!seen)
+                    {
+                        // Only fail once we are no longer in applyDeletes
+                        failed = true;
+                        throw new System.IO.IOException("fail after applyDeletes");
+                    }
+                }
+                if (!failed)
+                {
+                    System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
+                    for (int i = 0; i < trace.FrameCount; i++)
+                    {
+                        System.Diagnostics.StackFrame sf = trace.GetFrame(i);
+                        if ("ApplyDeletes".Equals(sf.GetMethod().Name))
+                        {
+                            sawMaybe = true;
+                            break;
+                        }
+                    }
+                }
+            }
+        }
+        private class AnonymousClassFailure1 : MockRAMDirectory.Failure
+        {
+            public AnonymousClassFailure1(TestIndexWriterDelete enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+            }
+            private void InitBlock(TestIndexWriterDelete enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private TestIndexWriterDelete enclosingInstance;
+            public TestIndexWriterDelete Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+
+            }
+            internal bool failed = false;
+            public override MockRAMDirectory.Failure Reset()
+            {
+                failed = false;
+                return this;
+            }
+            public override void Eval(MockRAMDirectory dir)
+            {
+                if (!failed)
+                {
+                    failed = true;
+                    throw new System.IO.IOException("fail in add doc");
+                }
+            }
+        }
+
+        // test the simple case
+        [Test]
+        public virtual void TestSimpleCase()
+        {
+            System.String[] keywords = new System.String[] { "1", "2" };
+            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
+            System.String[] unstored = new System.String[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
+            System.String[] text = new System.String[] { "Amsterdam", "Venice" };
+
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+
+                Directory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                modifier.SetUseCompoundFile(true);
+                modifier.SetMaxBufferedDeleteTerms(1);
+
+                for (int i = 0; i < keywords.Length; i++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
+                    doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+                    doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
+                    doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
+                    modifier.AddDocument(doc);
+                }
+                modifier.Optimize();
+                modifier.Commit();
+
+                Term term = new Term("city", "Amsterdam");
+                int hitCount = GetHitCount(dir, term);
+                Assert.AreEqual(1, hitCount);
+                modifier.DeleteDocuments(term);
+                modifier.Commit();
+                hitCount = GetHitCount(dir, term);
+                Assert.AreEqual(0, hitCount);
+
+                modifier.Close();
+                dir.Close();
+            }
+        }
+
+        // test when delete terms only apply to disk segments
+        [Test]
+        public virtual void TestNonRAMDelete()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+
+                Directory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                modifier.SetMaxBufferedDocs(2);
+                modifier.SetMaxBufferedDeleteTerms(2);
+
+                int id = 0;
+                int value_Renamed = 100;
+
+                for (int i = 0; i < 7; i++)
+                {
+                    AddDoc(modifier, ++id, value_Renamed);
+                }
+                modifier.Commit();
+
+                Assert.AreEqual(0, modifier.GetNumBufferedDocuments());
+                Assert.IsTrue(0 < modifier.GetSegmentCount());
+
+                modifier.Commit();
+
+                IndexReader reader = IndexReader.Open(dir);
+                Assert.AreEqual(7, reader.NumDocs());
+                reader.Close();
+
+                modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+
+                modifier.Commit();
+
+                reader = IndexReader.Open(dir);
+                Assert.AreEqual(0, reader.NumDocs());
+                reader.Close();
+                modifier.Close();
+                dir.Close();
+            }
+        }
+
+        [Test]
+        public virtual void TestMaxBufferedDeletes()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+                Directory dir = new MockRAMDirectory();
+                IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                writer.SetMaxBufferedDeleteTerms(1);
+                writer.DeleteDocuments(new Term("foobar", "1"));
+                writer.DeleteDocuments(new Term("foobar", "1"));
+                writer.DeleteDocuments(new Term("foobar", "1"));
+                Assert.AreEqual(3, writer.GetFlushDeletesCount());
+                writer.Close();
+                dir.Close();
+            }
+        }
+
+        // test when delete terms only apply to ram segments
+        [Test]
+        public virtual void TestRAMDeletes()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                for (int t = 0; t < 2; t++)
+                {
+                    bool autoCommit = (0 == pass);
+                    Directory dir = new MockRAMDirectory();
+                    IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                    modifier.SetMaxBufferedDocs(4);
+                    modifier.SetMaxBufferedDeleteTerms(4);
+
+                    int id = 0;
+                    int value_Renamed = 100;
+
+                    AddDoc(modifier, ++id, value_Renamed);
+                    if (0 == t)
+                        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+                    else
+                        modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
+                    AddDoc(modifier, ++id, value_Renamed);
+                    if (0 == t)
+                    {
+                        modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+                        Assert.AreEqual(2, modifier.GetNumBufferedDeleteTerms());
+                        Assert.AreEqual(1, modifier.GetBufferedDeleteTermsSize());
+                    }
+                    else
+                        modifier.DeleteDocuments(new TermQuery(new Term("value", System.Convert.ToString(value_Renamed))));
+
+                    AddDoc(modifier, ++id, value_Renamed);
+                    Assert.AreEqual(0, modifier.GetSegmentCount());
+                    modifier.Flush();
+
+                    modifier.Commit();
+
+                    IndexReader reader = IndexReader.Open(dir);
+                    Assert.AreEqual(1, reader.NumDocs());
+
+                    int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
+                    Assert.AreEqual(1, hitCount);
+                    reader.Close();
+                    modifier.Close();
+                    dir.Close();
+                }
+            }
+        }
+
+        // test when delete terms apply to both disk and ram segments
+        [Test]
+        public virtual void TestBothDeletes()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+
+                Directory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                modifier.SetMaxBufferedDocs(100);
+                modifier.SetMaxBufferedDeleteTerms(100);
+
+                int id = 0;
+                int value_Renamed = 100;
+
+                for (int i = 0; i < 5; i++)
+                {
+                    AddDoc(modifier, ++id, value_Renamed);
+                }
+
+                value_Renamed = 200;
+                for (int i = 0; i < 5; i++)
+                {
+                    AddDoc(modifier, ++id, value_Renamed);
+                }
+                modifier.Commit();
+
+                for (int i = 0; i < 5; i++)
+                {
+                    AddDoc(modifier, ++id, value_Renamed);
+                }
+                modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
+
+                modifier.Commit();
+
+                IndexReader reader = IndexReader.Open(dir);
+                Assert.AreEqual(5, reader.NumDocs());
+                modifier.Close();
+            }
+        }
+
+        // test that batched delete terms are flushed together
+        [Test]
+        public virtual void TestBatchDeletes()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+                Directory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                modifier.SetMaxBufferedDocs(2);
+                modifier.SetMaxBufferedDeleteTerms(2);
+
+                int id = 0;
+                int value_Renamed = 100;
+
+                for (int i = 0; i < 7; i++)
+                {
+                    AddDoc(modifier, ++id, value_Renamed);
+                }
+                modifier.Commit();
+
+                IndexReader reader = IndexReader.Open(dir);
+                Assert.AreEqual(7, reader.NumDocs());
+                reader.Close();
+
+                id = 0;
+                modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
+                modifier.DeleteDocuments(new Term("id", System.Convert.ToString(++id)));
+
+                modifier.Commit();
+
+                reader = IndexReader.Open(dir);
+                Assert.AreEqual(5, reader.NumDocs());
+                reader.Close();
+
+                Term[] terms = new Term[3];
+                for (int i = 0; i < terms.Length; i++)
+                {
+                    terms[i] = new Term("id", System.Convert.ToString(++id));
+                }
+                modifier.DeleteDocuments(terms);
+                modifier.Commit();
+                reader = IndexReader.Open(dir);
+                Assert.AreEqual(2, reader.NumDocs());
+                reader.Close();
+
+                modifier.Close();
+                dir.Close();
+            }
+        }
+
+        // test deleteAll()
+        [Test]
+        public virtual void TestDeleteAll()
+        {
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+                Directory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                modifier.SetMaxBufferedDocs(2);
+                modifier.SetMaxBufferedDeleteTerms(2);
+
+                int id = 0;
+                int value_Renamed = 100;
+
+                for (int i = 0; i < 7; i++)
+                {
+                    AddDoc(modifier, ++id, value_Renamed);
+                }
+                modifier.Commit();
+
+                IndexReader reader = IndexReader.Open(dir);
+                Assert.AreEqual(7, reader.NumDocs());
+                reader.Close();
+
+                // Add 1 doc (so we will have something buffered)
+                AddDoc(modifier, 99, value_Renamed);
+
+                // Delete all
+                modifier.DeleteAll();
+
+                // Delete all shouldn't be on disk yet
+                reader = IndexReader.Open(dir);
+                Assert.AreEqual(7, reader.NumDocs());
+                reader.Close();
+
+                // Add a doc and update a doc (after the deleteAll, before the commit)
+                AddDoc(modifier, 101, value_Renamed);
+                UpdateDoc(modifier, 102, value_Renamed);
+
+                // commit the delete all
+                modifier.Commit();
+
+                // Validate there are no docs left
+                reader = IndexReader.Open(dir);
+                Assert.AreEqual(2, reader.NumDocs());
+                reader.Close();
+
+                modifier.Close();
+                dir.Close();
+            }
+        }
+
+        // test rollback of deleteAll()
+        [Test]
+        public virtual void TestDeleteAllRollback()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
+            modifier.SetMaxBufferedDocs(2);
+            modifier.SetMaxBufferedDeleteTerms(2);
+
+            int id = 0;
+            int value_Renamed = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value_Renamed);
+            }
+            modifier.Commit();
+
+            AddDoc(modifier, ++id, value_Renamed);
+
+            IndexReader reader = IndexReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs());
+            reader.Close();
+
+            // Delete all
+            modifier.DeleteAll();
+
+            // Roll it back
+            modifier.Rollback();
+            modifier.Close();
+
+            // Validate that the docs are still there
+            reader = IndexReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs());
+            reader.Close();
+
+            dir.Close();
+        }
+
+
+        // test deleteAll() w/ near real-time reader
+        [Test]
+        public virtual void TestDeleteAllNRT()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
+            modifier.SetMaxBufferedDocs(2);
+            modifier.SetMaxBufferedDeleteTerms(2);
+
+            int id = 0;
+            int value_Renamed = 100;
+
+            for (int i = 0; i < 7; i++)
+            {
+                AddDoc(modifier, ++id, value_Renamed);
+            }
+            modifier.Commit();
+
+            IndexReader reader = modifier.GetReader();
+            Assert.AreEqual(7, reader.NumDocs());
+            reader.Close();
+
+            AddDoc(modifier, ++id, value_Renamed);
+            AddDoc(modifier, ++id, value_Renamed);
+
+            // Delete all
+            modifier.DeleteAll();
+
+            reader = modifier.GetReader();
+            Assert.AreEqual(0, reader.NumDocs());
+            reader.Close();
+
+
+            // Roll it back
+            modifier.Rollback();
+            modifier.Close();
+
+            // Validate that the docs are still there
+            reader = IndexReader.Open(dir);
+            Assert.AreEqual(7, reader.NumDocs());
+            reader.Close();
+
+            dir.Close();
+        }
+
+
+        private void UpdateDoc(IndexWriter modifier, int id, int value_Renamed)
+        {
+            Document doc = new Document();
+            doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
+            doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
+            doc.Add(new Field("value", System.Convert.ToString(value_Renamed), Field.Store.NO, Field.Index.NOT_ANALYZED));
+            modifier.UpdateDocument(new Term("id", System.Convert.ToString(id)), doc);
+        }
+
+
+        private void AddDoc(IndexWriter modifier, int id, int value_Renamed)
+        {
+            Document doc = new Document();
+            doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
+            doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.NOT_ANALYZED));
+            doc.Add(new Field("value", System.Convert.ToString(value_Renamed), Field.Store.NO, Field.Index.NOT_ANALYZED));
+            modifier.AddDocument(doc);
+        }
+
+        private int GetHitCount(Directory dir, Term term)
+        {
+            IndexSearcher searcher = new IndexSearcher(dir);
+            int hitCount = searcher.Search(new TermQuery(term), null, 1000).TotalHits;
+            searcher.Close();
+            return hitCount;
+        }
+
+        [Test]
+        public virtual void TestDeletesOnDiskFull()
+        {
+            TestOperationsOnDiskFull(false);
+        }
+
+        [Test]
+        public virtual void TestUpdatesOnDiskFull()
+        {
+            TestOperationsOnDiskFull(true);
+        }
+
+        /// <summary> Make sure if modifier tries to commit but hits disk full that modifier
+        /// remains consistent and usable. Similar to TestIndexReader.testDiskFull().
+        /// </summary>
+        private void TestOperationsOnDiskFull(bool updates)
+        {
+
+            bool debug = false;
+            Term searchTerm = new Term("content", "aaa");
+            int START_COUNT = 157;
+            int END_COUNT = 144;
+
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+
+                // First build up a starting index:
+                MockRAMDirectory startDir = new MockRAMDirectory();
+                IndexWriter writer = new IndexWriter(startDir, autoCommit, new WhitespaceAnalyzer(), true);
+                for (int i = 0; i < 157; i++)
+                {
+                    Document d = new Document();
+                    d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
+                    d.Add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
+                    writer.AddDocument(d);
+                }
+                writer.Close();
+
+                long diskUsage = startDir.SizeInBytes();
+                long diskFree = diskUsage + 10;
+
+                System.IO.IOException err = null;
+
+                bool done = false;
+
+                // Iterate w/ ever increasing free disk space:
+                while (!done)
+                {
+                    MockRAMDirectory dir = new MockRAMDirectory(startDir);
+                    dir.SetPreventDoubleWrite(false);
+                    IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+
+                    modifier.SetMaxBufferedDocs(1000); // use flush or close
+                    modifier.SetMaxBufferedDeleteTerms(1000); // use flush or close
+
+                    // For each disk size, first try to commit against
+                    // dir that will hit random IOExceptions & disk
+                    // full; after, give it infinite disk space & turn
+                    // off random IOExceptions & retry w/ same reader:
+                    bool success = false;
+
+                    for (int x = 0; x < 2; x++)
+                    {
+
+                        double rate = 0.1;
+                        double diskRatio = ((double)diskFree) / diskUsage;
+                        long thisDiskFree;
+                        System.String testName;
+
+                        if (0 == x)
+                        {
+                            thisDiskFree = diskFree;
+                            if (diskRatio >= 2.0)
+                            {
+                                rate /= 2;
+                            }
+                            if (diskRatio >= 4.0)
+                            {
+                                rate /= 2;
+                            }
+                            if (diskRatio >= 6.0)
+                            {
+                                rate = 0.0;
+                            }
+                            if (debug)
+                            {
+                                System.Console.Out.WriteLine("\ncycle: " + diskFree + " bytes");
+                            }
+                            testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
+                        }
+                        else
+                        {
+                            thisDiskFree = 0;
+                            rate = 0.0;
+                            if (debug)
+                            {
+                                System.Console.Out.WriteLine("\ncycle: same writer: unlimited disk space");
+                            }
+                            testName = "reader re-use after disk full";
+                        }
+
+                        dir.SetMaxSizeInBytes(thisDiskFree);
+                        dir.SetRandomIOExceptionRate(rate, diskFree);
+
+                        try
+                        {
+                            if (0 == x)
+                            {
+                                int docId = 12;
+                                for (int i = 0; i < 13; i++)
+                                {
+                                    if (updates)
+                                    {
+                                        Document d = new Document();
+                                        d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
+                                        d.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
+                                        modifier.UpdateDocument(new Term("id", System.Convert.ToString(docId)), d);
+                                    }
+                                    else
+                                    {
+                                        // deletes
+                                        modifier.DeleteDocuments(new Term("id", System.Convert.ToString(docId)));
+                                        // modifier.setNorm(docId, "contents", (float)2.0);
+                                    }
+                                    docId += 12;
+                                }
+                            }
+                            modifier.Close();
+                            success = true;
+                            if (0 == x)
+                            {
+                                done = true;
+                            }
+                        }
+                        catch (System.IO.IOException e)
+                        {
+                            if (debug)
+                            {
+                                System.Console.Out.WriteLine("  hit IOException: " + e);
+                                System.Console.Out.WriteLine(e.StackTrace);
+                            }
+                            err = e;
+                            if (1 == x)
+                            {
+                                System.Console.Error.WriteLine(e.StackTrace);
+                                Assert.Fail(testName + " hit IOException after disk space was freed up");
+                            }
+                        }
+
+                        // If the close() succeeded, make sure there are
+                        // no unreferenced files.
                         if (success)
                         {
                             Lucene.Net.Util._TestUtil.CheckIndex(dir);
                             TestIndexWriter.AssertNoUnreferencedFiles(dir, "after writer.close");
                         }
-						
-						// Finally, verify index is not corrupt, and, if
-						// we succeeded, we see all docs changed, and if
-						// we failed, we see either all docs or no docs
-						// changed (transactional semantics):
-						IndexReader newReader = null;
-						try
-						{
-							newReader = IndexReader.Open(dir);
-						}
-						catch (System.IO.IOException e)
-						{
-							System.Console.Error.WriteLine(e.StackTrace);
-							Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
-						}
-						
-						IndexSearcher searcher = new IndexSearcher(newReader);
-						ScoreDoc[] hits = null;
-						try
-						{
-							hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
-						}
-						catch (System.IO.IOException e)
-						{
-							System.Console.Error.WriteLine(e.StackTrace);
-							Assert.Fail(testName + ": exception when searching: " + e);
-						}
-						int result2 = hits.Length;
-						if (success)
-						{
-							if (x == 0 && result2 != END_COUNT)
-							{
-								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
-							}
-							else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
-							{
-								// It's possible that the first exception was
-								// "recoverable" wrt pending deletes, in which
-								// case the pending deletes are retained and
-								// then re-flushing (with plenty of disk
-								// space) will succeed in flushing the
-								// deletes:
-								Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
-							}
-						}
-						else
-						{
-							// On hitting exception we still may have added
-							// all docs:
-							if (result2 != START_COUNT && result2 != END_COUNT)
-							{
-								System.Console.Error.WriteLine(err.StackTrace);
-								Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
-							}
-						}
-						
-						searcher.Close();
-						newReader.Close();
-						
-						if (result2 == END_COUNT)
-						{
-							break;
-						}
-					}
-					
-					dir.Close();
-					
-					// Try again with 10 more bytes of free space:
-					diskFree += 10;
-				}
-			}
-		}
-		
-		// This test tests that buffered deletes are cleared when
-		// an Exception is hit during flush.
-		[Test]
-		public virtual void  TestErrorAfterApplyDeletes()
-		{
-			
-			MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
-			
-			// create a couple of files
-			
-			System.String[] keywords = new System.String[]{"1", "2"};
-			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
-			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
-			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				MockRAMDirectory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				modifier.SetUseCompoundFile(true);
-				modifier.SetMaxBufferedDeleteTerms(2);
-				
-				dir.FailOn(failure.Reset());
-				
-				for (int i = 0; i < keywords.Length; i++)
-				{
-					Document doc = new Document();
-					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
-					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
-					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
-					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
-					modifier.AddDocument(doc);
-				}
-				// flush (and commit if ac)
-				
-				modifier.Optimize();
-				modifier.Commit();
-				
-				// one of the two files hits
-				
-				Term term = new Term("city", "Amsterdam");
-				int hitCount = GetHitCount(dir, term);
-				Assert.AreEqual(1, hitCount);
-				
-				// open the writer again (closed above)
-				
-				// delete the doc
-				// max buf del terms is two, so this is buffered
-				
-				modifier.DeleteDocuments(term);
-				
-				// add a doc (needed for the !ac case; see below)
-				// doc remains buffered
-				
-				Document doc2 = new Document();
-				modifier.AddDocument(doc2);
-				
-				// commit the changes, the buffered deletes, and the new doc
-				
-				// The failure object will fail on the first write after the del
-				// file gets created when processing the buffered delete
-				
-				// in the ac case, this will be when writing the new segments
-				// files so we really don't need the new doc, but it's harmless
-				
-				// in the !ac case, a new segments file won't be created but in
-				// this case, creation of the cfs file happens next so we need
-				// the doc (to test that it's okay that we don't lose deletes if
-				// failing while creating the cfs file)
-				
-				bool failed = false;
-				try
-				{
-					modifier.Commit();
-				}
-				catch (System.IO.IOException ioe)
-				{
-					failed = true;
-				}
-				
-				Assert.IsTrue(failed);
-				
-				// The commit above failed, so we need to retry it (which will
-				// succeed, because the failure is a one-shot)
-				
-				modifier.Commit();
-				
-				hitCount = GetHitCount(dir, term);
-				
-				// Make sure the delete was successfully flushed:
-				Assert.AreEqual(0, hitCount);
-				
-				modifier.Close();
-				dir.Close();
-			}
-		}
-		
-		// This test tests that the files created by the docs writer before
-		// a segment is written are cleaned up if there's an i/o error
-		
-		[Test]
-		public virtual void  TestErrorInDocsWriterAdd()
-		{
-			
-			MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
-			
-			// create a couple of files
-			
-			System.String[] keywords = new System.String[]{"1", "2"};
-			System.String[] unindexed = new System.String[]{"Netherlands", "Italy"};
-			System.String[] unstored = new System.String[]{"Amsterdam has lots of bridges", "Venice has lots of canals"};
-			System.String[] text = new System.String[]{"Amsterdam", "Venice"};
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = (0 == pass);
-				MockRAMDirectory dir = new MockRAMDirectory();
-				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-				
-				dir.FailOn(failure.Reset());
-				
-				for (int i = 0; i < keywords.Length; i++)
-				{
-					Document doc = new Document();
-					doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
-					doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
-					doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
-					doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
-					try
-					{
-						modifier.AddDocument(doc);
-					}
-					catch (System.IO.IOException io)
-					{
-						break;
-					}
-				}
-				
-				System.String[] startFiles = dir.ListAll();
-				SegmentInfos infos = new SegmentInfos();
-				infos.Read(dir);
-				new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null,null);
-				System.String[] endFiles = dir.ListAll();
-				
-				if (!Support.CollectionsHelper.CompareStringArrays(startFiles, endFiles))
-				{
-					Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + ArrayToString(startFiles) + "\n  after delete:\n    " + ArrayToString(endFiles));
-				}
-				
-				modifier.Close();
-			}
-		}
-		
-		private System.String ArrayToString(System.String[] l)
-		{
-			System.String s = "";
-			for (int i = 0; i < l.Length; i++)
-			{
-				if (i > 0)
-				{
-					s += "\n    ";
-				}
-				s += l[i];
-			}
-			return s;
-		}
-	}
+
+                        // Finally, verify index is not corrupt, and, if
+                        // we succeeded, we see all docs changed, and if
+                        // we failed, we see either all docs or no docs
+                        // changed (transactional semantics):
+                        IndexReader newReader = null;
+                        try
+                        {
+                            newReader = IndexReader.Open(dir);
+                        }
+                        catch (System.IO.IOException e)
+                        {
+                            System.Console.Error.WriteLine(e.StackTrace);
+                            Assert.Fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
+                        }
+
+                        IndexSearcher searcher = new IndexSearcher(newReader);
+                        ScoreDoc[] hits = null;
+                        try
+                        {
+                            hits = searcher.Search(new TermQuery(searchTerm), null, 1000).ScoreDocs;
+                        }
+                        catch (System.IO.IOException e)
+                        {
+                            System.Console.Error.WriteLine(e.StackTrace);
+                            Assert.Fail(testName + ": exception when searching: " + e);
+                        }
+                        int result2 = hits.Length;
+                        if (success)
+                        {
+                            if (x == 0 && result2 != END_COUNT)
+                            {
+                                Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
+                            }
+                            else if (x == 1 && result2 != START_COUNT && result2 != END_COUNT)
+                            {
+                                // It's possible that the first exception was
+                                // "recoverable" wrt pending deletes, in which
+                                // case the pending deletes are retained and
+                                // then re-flushing (with plenty of disk
+                                // space) will succeed in flushing the
+                                // deletes:
+                                Assert.Fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+                            }
+                        }
+                        else
+                        {
+                            // On hitting exception we still may have added
+                            // all docs:
+                            if (result2 != START_COUNT && result2 != END_COUNT)
+                            {
+                                System.Console.Error.WriteLine(err.StackTrace);
+                                Assert.Fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
+                            }
+                        }
+
+                        searcher.Close();
+                        newReader.Close();
+
+                        if (result2 == END_COUNT)
+                        {
+                            break;
+                        }
+                    }
+
+                    dir.Close();
+
+                    // Try again with 10 more bytes of free space:
+                    diskFree += 10;
+                }
+            }
+        }
+
+        // This test tests that buffered deletes are cleared when
+        // an Exception is hit during flush.
+        [Test]
+        public virtual void TestErrorAfterApplyDeletes()
+        {
+
+            MockRAMDirectory.Failure failure = new AnonymousClassFailure(this);
+
+            // create a couple of files
+
+            System.String[] keywords = new System.String[] { "1", "2" };
+            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
+            System.String[] unstored = new System.String[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
+            System.String[] text = new System.String[] { "Amsterdam", "Venice" };
+
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+                MockRAMDirectory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+                modifier.SetUseCompoundFile(true);
+                modifier.SetMaxBufferedDeleteTerms(2);
+
+                dir.FailOn(failure.Reset());
+
+                for (int i = 0; i < keywords.Length; i++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
+                    doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+                    doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
+                    doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
+                    modifier.AddDocument(doc);
+                }
+                // flush (and commit if ac)
+
+                modifier.Optimize();
+                modifier.Commit();
+
+                // one of the two files hits
+
+                Term term = new Term("city", "Amsterdam");
+                int hitCount = GetHitCount(dir, term);
+                Assert.AreEqual(1, hitCount);
+
+                // open the writer again (closed above)
+
+                // delete the doc
+                // max buf del terms is two, so this is buffered
+
+                modifier.DeleteDocuments(term);
+
+                // add a doc (needed for the !ac case; see below)
+                // doc remains buffered
+
+                Document doc2 = new Document();
+                modifier.AddDocument(doc2);
+
+                // commit the changes, the buffered deletes, and the new doc
+
+                // The failure object will fail on the first write after the del
+                // file gets created when processing the buffered delete
+
+                // in the ac case, this will be when writing the new segments
+                // files so we really don't need the new doc, but it's harmless
+
+                // in the !ac case, a new segments file won't be created but in
+                // this case, creation of the cfs file happens next so we need
+                // the doc (to test that it's okay that we don't lose deletes if
+                // failing while creating the cfs file)
+
+                bool failed = false;
+                try
+                {
+                    modifier.Commit();
+                }
+                catch (System.IO.IOException ioe)
+                {
+                    failed = true;
+                }
+
+                Assert.IsTrue(failed);
+
+                // The commit above failed, so we need to retry it (which will
+                // succeed, because the failure is a one-shot)
+
+                modifier.Commit();
+
+                hitCount = GetHitCount(dir, term);
+
+                // Make sure the delete was successfully flushed:
+                Assert.AreEqual(0, hitCount);
+
+                modifier.Close();
+                dir.Close();
+            }
+        }
+
+        // This test tests that the files created by the docs writer before
+        // a segment is written are cleaned up if there's an i/o error
+
+        [Test]
+        public virtual void TestErrorInDocsWriterAdd()
+        {
+
+            MockRAMDirectory.Failure failure = new AnonymousClassFailure1(this);
+
+            // create a couple of files
+
+            System.String[] keywords = new System.String[] { "1", "2" };
+            System.String[] unindexed = new System.String[] { "Netherlands", "Italy" };
+            System.String[] unstored = new System.String[] { "Amsterdam has lots of bridges", "Venice has lots of canals" };
+            System.String[] text = new System.String[] { "Amsterdam", "Venice" };
+
+            for (int pass = 0; pass < 2; pass++)
+            {
+                bool autoCommit = (0 == pass);
+                MockRAMDirectory dir = new MockRAMDirectory();
+                IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+
+                dir.FailOn(failure.Reset());
+
+                for (int i = 0; i < keywords.Length; i++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new Field("id", keywords[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
+                    doc.Add(new Field("country", unindexed[i], Field.Store.YES, Field.Index.NO));
+                    doc.Add(new Field("contents", unstored[i], Field.Store.NO, Field.Index.ANALYZED));
+                    doc.Add(new Field("city", text[i], Field.Store.YES, Field.Index.ANALYZED));
+                    try
+                    {
+                        modifier.AddDocument(doc);
+                    }
+                    catch (System.IO.IOException io)
+                    {
+                        break;
+                    }
+                }
+
+                Support.EquatableList<string> startFiles = new Support.EquatableList<string>(dir.ListAll());
+                SegmentInfos infos = new SegmentInfos();
+                infos.Read(dir);
+                new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null, null);
+                Support.EquatableList<string> endFiles = new Support.EquatableList<string>(dir.ListAll());
+
+                if (!startFiles.Equals(endFiles))
+                {
+                    Assert.Fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    " + startFiles.ToString() + "\n  after delete:\n    " + endFiles.ToString());
+                }
+
+                modifier.Close();
+            }
+        }
+    }
 }
\ No newline at end of file

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestStressIndexing2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestStressIndexing2.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestStressIndexing2.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Index/TestStressIndexing2.cs Sun May 15 00:45:27 2011
@@ -16,6 +16,8 @@
  */
 
 using System;
+using System.Linq;
+using System.Collections.Generic;
 
 using NUnit.Framework;
 
@@ -150,13 +152,13 @@ namespace Lucene.Net.Index
 		
 		public class DocsAndWriter
 		{
-			internal System.Collections.IDictionary docs;
+			internal Dictionary<string,Document> docs;
 			internal IndexWriter writer;
 		}
 		
 		public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
 		{
-			System.Collections.Hashtable docs = new System.Collections.Hashtable();
+            Dictionary<string, Document> docs = new Dictionary<string, Document>();
 			IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
 			w.SetUseCompoundFile(false);
 			
@@ -200,7 +202,7 @@ namespace Lucene.Net.Index
 				IndexingThread th = threads[i];
 				lock (th)
 				{
-					Support.CollectionsHelper.AddAllIfNotContains(docs, th.docs);
+                    docs.Union(th.docs);
 				}
 			}
 			
@@ -252,10 +254,9 @@ namespace Lucene.Net.Index
 					IndexingThread th = threads[i];
 					lock (th)
 					{
-                        System.Collections.IEnumerator e = th.docs.Keys.GetEnumerator();
-                        while (e.MoveNext())
+                        foreach(string key in th.docs.Keys)
                         {
-                            docs[e.Current] = th.docs[e.Current];
+                            docs[key] = th.docs[key];
                         }
 					}
 				}
@@ -579,7 +580,7 @@ namespace Lucene.Net.Index
 			internal int base_Renamed;
 			internal int range;
 			internal int iterations;
-			internal System.Collections.IDictionary docs = new System.Collections.Hashtable(); // Map<String,Document>
+            internal Dictionary<string, Document> docs = new Dictionary<string, Document>(); // Map<String,Document>
 			internal System.Random r;
 			
 			public virtual int NextInt(int lim)
@@ -658,7 +659,7 @@ namespace Lucene.Net.Index
 			public virtual System.String GetUTF8String(int nTokens)
 			{
 				int upto = 0;
-				Support.CollectionsHelper.Fill(buffer, (char) 0);
+                for(int i=0;i<buffer.Length;i++) buffer[i]=(char) 0;
 				for (int i = 0; i < nTokens; i++)
 					upto = AddUTF8Token(upto);
 				return new System.String(buffer, 0, upto);

Modified: incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Search/CheckHits.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Search/CheckHits.cs?rev=1103259&r1=1103258&r2=1103259&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Search/CheckHits.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net_2_9_4g/test/core/Search/CheckHits.cs Sun May 15 00:45:27 2011
@@ -16,6 +16,7 @@
  */
 
 using System;
+using System.Collections.Generic;
 
 using NUnit.Framework;
 
@@ -171,21 +172,19 @@ namespace Lucene.Net.Search
 			}
 			
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
-			
-			System.Collections.ArrayList correct = new System.Collections.ArrayList();
+
+            SortedSet<int> correct = new SortedSet<int>();
 			for (int i = 0; i < results.Length; i++)
 			{
-                Support.CollectionsHelper.AddIfNotContains(correct, results[i]);
+                correct.Add(results[i]);
 			}
-            correct.Sort();
-			
-			System.Collections.ArrayList actual = new System.Collections.ArrayList();
+
+            SortedSet<int> actual = new SortedSet<int>();
 			for (int i = 0; i < hits.Length; i++)
 			{
-				Support.CollectionsHelper.AddIfNotContains(actual, hits[i].doc);
+				actual.Add(hits[i].doc);
 			}
-            actual.Sort();
-			
+            			
 			Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));
 			
 			QueryUtils.Check(query, searcher);



Mime
View raw message