lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r677059 [3/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/ ...
Date Tue, 15 Jul 2008 21:44:10 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/DocHelper.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/DocHelper.cs Tue Jul 15 14:44:04 2008
@@ -1,4 +1,4 @@
-/*
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -40,14 +40,14 @@
 		public const System.String TEXT_FIELD_2_KEY = "textField2";
 		public static Field textField2;
 		
-        public const System.String FIELD_2_COMPRESSED_TEXT = "field field field two text";
-        //Fields will be lexicographically sorted.  So, the order is: field, text, two
-        public static readonly int[] COMPRESSED_FIELD_2_FREQS = new int[]{3, 1, 1};
-        public const System.String COMPRESSED_TEXT_FIELD_2_KEY = "compressedTextField2";
-        public static Field compressedTextField2;
+		public const System.String FIELD_2_COMPRESSED_TEXT = "field field field two text";
+		//Fields will be lexicographically sorted.  So, the order is: field, text, two
+		public static readonly int[] COMPRESSED_FIELD_2_FREQS = new int[]{3, 1, 1};
+		public const System.String COMPRESSED_TEXT_FIELD_2_KEY = "compressedTextField2";
+		public static Field compressedTextField2;
 
 
-        public const System.String FIELD_3_TEXT = "aaaNoNorms aaaNoNorms bbbNoNorms";
+		public const System.String FIELD_3_TEXT = "aaaNoNorms aaaNoNorms bbbNoNorms";
 		public const System.String TEXT_FIELD_3_KEY = "textField3";
 		public static Field textField3;
 		
@@ -72,37 +72,38 @@
 		public const System.String UNSTORED_FIELD_2_KEY = "unStoredField2";
 		public static Field unStoredField2;
 		
-        public const System.String LAZY_FIELD_BINARY_KEY = "lazyFieldBinary";
-        public static byte[] LAZY_FIELD_BINARY_BYTES;
-        public static Field lazyFieldBinary;
-		
-        public const System.String LAZY_FIELD_KEY = "lazyField";
-        public const System.String LAZY_FIELD_TEXT = "These are some field bytes";
-        public static Field lazyField;
-		
-        public const System.String LARGE_LAZY_FIELD_KEY = "largeLazyField";
-        public static System.String LARGE_LAZY_FIELD_TEXT;
-        public static Field largeLazyField;
-		
-        //From Issue 509
-        public const System.String FIELD_UTF1_TEXT = "field one \u4e00text";
-        public const System.String TEXT_FIELD_UTF1_KEY = "textField1Utf8";
-        public static Field textUtfField1;
-		
-        public const System.String FIELD_UTF2_TEXT = "field field field \u4e00two text";
-        //Fields will be lexicographically sorted.  So, the order is: field, text, two
-        public static readonly int[] FIELD_UTF2_FREQS = new int[]{3, 1, 1};
-        public const System.String TEXT_FIELD_UTF2_KEY = "textField2Utf8";
-        public static Field textUtfField2;
+		public const System.String LAZY_FIELD_BINARY_KEY = "lazyFieldBinary";
+		public static byte[] LAZY_FIELD_BINARY_BYTES;
+		public static Field lazyFieldBinary;
+		
+		public const System.String LAZY_FIELD_KEY = "lazyField";
+		public const System.String LAZY_FIELD_TEXT = "These are some field bytes";
+		public static Field lazyField;
+		
+		public const System.String LARGE_LAZY_FIELD_KEY = "largeLazyField";
+		public static System.String LARGE_LAZY_FIELD_TEXT;
+		public static Field largeLazyField;
+		
+		//From Issue 509
+		public const System.String FIELD_UTF1_TEXT = "field one \u4e00text";
+		public const System.String TEXT_FIELD_UTF1_KEY = "textField1Utf8";
+		public static Field textUtfField1;
+
+		public const System.String FIELD_UTF2_TEXT = "field field field \u4e00two text";
+		//Fields will be lexicographically sorted.  So, the order is: field, text, two
+		public static readonly int[] FIELD_UTF2_FREQS = new int[]{3, 1, 1};
+		public const System.String TEXT_FIELD_UTF2_KEY = "textField2Utf8";
+		public static Field textUtfField2;
 		
 		
 		
 		
-        public static System.Collections.IDictionary nameValues = null;
+		public static System.Collections.IDictionary nameValues = null;
 		
 		// ordered list of all the fields...
-		// could use LinkedHashMap for this purpose if Java1.4 is OK
-        public static Field[] fields = new Field[]{textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField};
+		// this results in null entries in array....
+		//public static Field[] fields = new Field[]{textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField};
+		public static Field[] fields;
 		
 		// Map<String fieldName, Field field>
 		public static System.Collections.IDictionary all = new System.Collections.Hashtable();
@@ -118,6 +119,15 @@
 		
 		private static void  Add(System.Collections.IDictionary map, Fieldable field)
 		{
+			if (field == null) System.Console.WriteLine("FIELD IS NULL!!!");
+			if (field == null) System.Console.WriteLine("FIELD IS NULL!!!");
+			if (field == null) System.Console.WriteLine("FIELD IS NULL!!!");
+			if (map == null) System.Console.WriteLine("MAP IS NULL!!!");
+			if (map == null) System.Console.WriteLine("MAP IS NULL!!!");
+			if (map == null) System.Console.WriteLine("MAP IS NULL!!!");
+			if (field.Name() == null) System.Console.WriteLine("FIELD NAME IS NULL!!!");
+			if (field.Name() == null) System.Console.WriteLine("FIELD NAME IS NULL!!!");
+			if (field.Name() == null) System.Console.WriteLine("FIELD NAME IS NULL!!!");
 			map[field.Name()] = field;
 		}
 		
@@ -132,135 +142,85 @@
 			}
 		}
 		
-		/// <summary> Writes the document to the directory using a segment named "test"</summary>
-		/// <param name="dir">
-		/// </param>
-		/// <param name="doc">
-		/// </param>
-		/// <throws>  IOException </throws>
-		public static void  WriteDoc(Directory dir, Lucene.Net.Documents.Document doc)
-		{
-			WriteDoc(dir, "test", doc);
-		}
-		
-		/// <summary> Writes the document to the directory in the given segment</summary>
-		/// <param name="dir">
-		/// </param>
-		/// <param name="segment">
-		/// </param>
-		/// <param name="doc">
-		/// </param>
-		/// <throws>  IOException </throws>
-		public static void  WriteDoc(Directory dir, System.String segment, Lucene.Net.Documents.Document doc)
-		{
-			Similarity similarity = Similarity.GetDefault();
-			WriteDoc(dir, new WhitespaceAnalyzer(), similarity, segment, doc);
-		}
-		
-		/// <summary> Writes the document to the directory segment named "test" using the specified analyzer and similarity</summary>
+		/// <summary> Writes the document to the directory using a segment
+		/// named "test"; returns the SegmentInfo describing the new
+		/// segment 
+		/// </summary>
 		/// <param name="dir">
 		/// </param>
-		/// <param name="analyzer">
-		/// </param>
-		/// <param name="similarity">
-		/// </param>
 		/// <param name="doc">
 		/// </param>
 		/// <throws>  IOException </throws>
-		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Lucene.Net.Documents.Document doc)
+		public static SegmentInfo WriteDoc(Directory dir, Document doc)
 		{
-			WriteDoc(dir, analyzer, similarity, "test", doc);
+			return WriteDoc(dir, new WhitespaceAnalyzer(), Similarity.GetDefault(), doc);
 		}
 		
-		/// <summary> Writes the document to the directory segment using the analyzer and the similarity score</summary>
+		/// <summary> Writes the document to the directory using the analyzer
+		/// and the similarity score; returns the SegmentInfo
+		/// describing the new segment
+		/// </summary>
 		/// <param name="dir">
 		/// </param>
 		/// <param name="analyzer">
 		/// </param>
 		/// <param name="similarity">
 		/// </param>
-		/// <param name="segment">
-		/// </param>
 		/// <param name="doc">
 		/// </param>
 		/// <throws>  IOException </throws>
-		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, System.String segment, Lucene.Net.Documents.Document doc)
+		public static SegmentInfo WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc)
 		{
-			DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
-			writer.AddDocument(segment, doc);
+			IndexWriter writer = new IndexWriter(dir, analyzer);
+			writer.SetSimilarity(similarity);
+			//writer.setUseCompoundFile(false);
+			writer.AddDocument(doc);
+			writer.Flush();
+			SegmentInfo info = writer.NewestSegment();
+			writer.Close();
+			return info;
 		}
 		
-		public static int NumFields(Lucene.Net.Documents.Document doc)
+		public static int NumFields(Document doc)
 		{
 			return doc.GetFields().Count;
 		}
-
-            /*
-0        textField1, 
-1        textField2, 
-2        textField3, 
-3        compressedTextField2, 
-4        keyField, 
-5        noNormsField, 
-6        unIndField, 
-7        unStoredField1, 
-8        unStoredField2, 
-9        textUtfField1, 
-10       textUtfField2, 
-11       lazyField, 
-12       lazyFieldBinary, 
-13       largeLazyField
-            */
-
-        static DocHelper()
+		static DocHelper()
 		{
-            textField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
-            fields[0] = textField1;
-            textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
-            fields[1] = textField2;
-            textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
-            fields[2] = textField3;
-            {
-                textField3.SetOmitNorms(true);
-            }
-            compressedTextField2 = new Field(COMPRESSED_TEXT_FIELD_2_KEY, FIELD_2_COMPRESSED_TEXT, Field.Store.COMPRESS, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
-            fields[3] = compressedTextField2;
-            keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES, Field.Index.UN_TOKENIZED);
-            fields[4] = keyField;
-            noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, Field.Store.YES, Field.Index.NO_NORMS);
-            fields[5] = noNormsField;
-            unIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, Field.Store.YES, Field.Index.NO);
-            fields[6] = unIndField;
-            unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO);
-            fields[7] = unStoredField1;
-            unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES);
-            fields[8] = unStoredField2;
-            textUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
-            fields[9] = textUtfField1;
-            textUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
-            fields[10] = textUtfField2;
-            lazyField = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
-            fields[11] = lazyField;
+			textField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
+			textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
+			compressedTextField2 = new Field(COMPRESSED_TEXT_FIELD_2_KEY, FIELD_2_COMPRESSED_TEXT, Field.Store.COMPRESS, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
+			textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
 			{
-                //Initialize the large Lazy Field
-                System.Text.StringBuilder buffer = new System.Text.StringBuilder();
-                for (int i = 0; i < 10000; i++)
-                {
-                    buffer.Append("Lazily loading lengths of language in lieu of laughing ");
-                }
-				
-                try
-                {
-                    LAZY_FIELD_BINARY_BYTES = System.Text.Encoding.UTF8.GetBytes("These are some binary field bytes");
-                }
-                catch (System.IO.IOException e)
-                {
-                }
-                lazyFieldBinary = new Field(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES, Field.Store.YES);
-                fields[fields.Length - 2] = lazyFieldBinary;
-                LARGE_LAZY_FIELD_TEXT = buffer.ToString();
-                largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
-                fields[fields.Length - 1] = largeLazyField;
+				textField3.SetOmitNorms(true);
+			}
+			keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES, Field.Index.UN_TOKENIZED);
+			noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, Field.Store.YES, Field.Index.NO_NORMS);
+			unIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, Field.Store.YES, Field.Index.NO);
+			unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.NO);
+			unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES);
+			lazyField = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
+			textUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO);
+			textUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
+			{
+				//Initialize the large Lazy Field
+				System.Text.StringBuilder buffer = new System.Text.StringBuilder();
+				for (int i = 0; i < 10000; i++)
+				{
+					buffer.Append("Lazily loading lengths of language in lieu of laughing ");
+				}
+
+				try
+				{
+					LAZY_FIELD_BINARY_BYTES = System.Text.Encoding.GetEncoding("UTF-8").GetBytes("These are some binary field bytes");
+				}
+				catch (System.IO.IOException)
+				{
+				}
+				lazyFieldBinary = new Field(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES, Field.Store.YES);
+				LARGE_LAZY_FIELD_TEXT = buffer.ToString();
+				largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
+				fields = new Field[] { textField1, textField2, textField3, compressedTextField2, keyField, noNormsField, unIndField, unStoredField1, unStoredField2, textUtfField1, textUtfField2, lazyField, lazyFieldBinary, largeLazyField };
 				for (int i = 0; i < fields.Length; i++)
 				{
 					Fieldable f = fields[i];
@@ -279,26 +239,27 @@
 						Add(unstored, f);
 					if (f.GetOmitNorms())
 						Add(noNorms, f);
-                    if (f.IsLazy())
-                        Add(lazy, f);
-                }
+					if (f.IsLazy())
+						Add(lazy, f);
+				}
 			}
+
 			{
-                nameValues = new System.Collections.Hashtable();
-                nameValues[TEXT_FIELD_1_KEY] = FIELD_1_TEXT;
-                nameValues[TEXT_FIELD_2_KEY] = FIELD_2_TEXT;
-                nameValues[COMPRESSED_TEXT_FIELD_2_KEY] = FIELD_2_COMPRESSED_TEXT;
-                nameValues[TEXT_FIELD_3_KEY] = FIELD_3_TEXT;
-                nameValues[KEYWORD_FIELD_KEY] = KEYWORD_TEXT;
-                nameValues[NO_NORMS_KEY] = NO_NORMS_TEXT;
-                nameValues[UNINDEXED_FIELD_KEY] = UNINDEXED_FIELD_TEXT;
-                nameValues[UNSTORED_FIELD_1_KEY] = UNSTORED_1_FIELD_TEXT;
-                nameValues[UNSTORED_FIELD_2_KEY] = UNSTORED_2_FIELD_TEXT;
-                nameValues[LAZY_FIELD_KEY] = LAZY_FIELD_TEXT;
-                nameValues[LAZY_FIELD_BINARY_KEY] = LAZY_FIELD_BINARY_BYTES;
-                nameValues[LARGE_LAZY_FIELD_KEY] = LARGE_LAZY_FIELD_TEXT;
-                nameValues[TEXT_FIELD_UTF1_KEY] = FIELD_UTF1_TEXT;
-                nameValues[TEXT_FIELD_UTF2_KEY] = FIELD_UTF2_TEXT;
+				nameValues = new System.Collections.Hashtable();
+				nameValues[TEXT_FIELD_1_KEY] = FIELD_1_TEXT;
+				nameValues[TEXT_FIELD_2_KEY] = FIELD_2_TEXT;
+				nameValues[COMPRESSED_TEXT_FIELD_2_KEY] = FIELD_2_COMPRESSED_TEXT;
+				nameValues[TEXT_FIELD_3_KEY] = FIELD_3_TEXT;
+				nameValues[KEYWORD_FIELD_KEY] = KEYWORD_TEXT;
+				nameValues[NO_NORMS_KEY] = NO_NORMS_TEXT;
+				nameValues[UNINDEXED_FIELD_KEY] = UNINDEXED_FIELD_TEXT;
+				nameValues[UNSTORED_FIELD_1_KEY] = UNSTORED_1_FIELD_TEXT;
+				nameValues[UNSTORED_FIELD_2_KEY] = UNSTORED_2_FIELD_TEXT;
+				nameValues[LAZY_FIELD_KEY] = LAZY_FIELD_TEXT;
+				nameValues[LAZY_FIELD_BINARY_KEY] = LAZY_FIELD_BINARY_BYTES;
+				nameValues[LARGE_LAZY_FIELD_KEY] = LARGE_LAZY_FIELD_TEXT;
+				nameValues[TEXT_FIELD_UTF1_KEY] = FIELD_UTF1_TEXT;
+				nameValues[TEXT_FIELD_UTF2_KEY] = FIELD_UTF2_TEXT;
 			}
 		}
 	}

Added: incubator/lucene.net/trunk/C#/src/Test/Index/DocTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/DocTest.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/DocTest.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/DocTest.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// {{Aroush-2.3.1}} remove this file from SVN
+/*
+using System;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+//using FileDocument = Lucene.Net.Demo.FileDocument;
+using Document = Lucene.Net.Documents.Document;
+using Similarity = Lucene.Net.Search.Similarity;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+namespace Lucene.Net.Index
+{
+	
+	// FIXME: OG: remove hard-coded file names
+	class DocTest
+	{
+		[STAThread]
+		public static void  Main(System.String[] args)
+		{
+			try
+			{
+				Directory directory = FSDirectory.GetDirectory("test", true);
+				directory.Close();
+				
+				IndexDoc("one", "test.txt");
+				PrintSegment("one");
+				IndexDoc("two", "test2.txt");
+				PrintSegment("two");
+				
+				Merge("one", "two", "merge");
+				PrintSegment("merge");
+				
+				Merge("one", "two", "merge2");
+				PrintSegment("merge2");
+				
+				Merge("merge", "merge2", "merge3");
+				PrintSegment("merge3");
+			}
+			catch (System.Exception e)
+			{
+				System.Console.Out.WriteLine(" caught a " + e.GetType() + "\n with message: " + e.Message);
+                System.Console.Error.WriteLine(e.StackTrace);
+			}
+		}
+		
+		public static void  IndexDoc(System.String segment, System.String fileName)
+		{
+			Directory directory = FSDirectory.GetDirectory("test", false);
+			Analyzer analyzer = new SimpleAnalyzer();
+			DocumentWriter writer = new DocumentWriter(directory, analyzer, Similarity.GetDefault(), 1000);
+			
+			System.IO.FileInfo file = new System.IO.FileInfo(fileName);
+			Document doc = Lucene.Net.Demo.FileDocument.Document(file);
+			
+			writer.AddDocument(segment, doc);
+			
+			directory.Close();
+		}
+		
+		internal static void  Merge(System.String seg1, System.String seg2, System.String merged)
+		{
+			Directory directory = FSDirectory.GetDirectory("test", false);
+			
+			SegmentReader r1 = new SegmentReader(new SegmentInfo(seg1, 1, directory));
+			SegmentReader r2 = new SegmentReader(new SegmentInfo(seg2, 1, directory));
+			
+			SegmentMerger merger = new SegmentMerger(directory, merged, false);
+			merger.Add(r1);
+			merger.Add(r2);
+			merger.Merge();
+			merger.CloseReaders();
+			
+			directory.Close();
+		}
+		
+		internal static void  PrintSegment(System.String segment)
+		{
+			Directory directory = FSDirectory.GetDirectory("test", false);
+			SegmentReader reader = new SegmentReader(new SegmentInfo(segment, 1, directory));
+			
+			for (int i = 0; i < reader.NumDocs(); i++)
+			{
+				System.Console.Out.WriteLine(reader.Document(i));
+			}
+			
+			TermEnum tis = reader.Terms();
+			while (tis.Next())
+			{
+				System.Console.Out.Write(tis.Term());
+				System.Console.Out.WriteLine(" DF=" + tis.DocFreq());
+				
+				TermPositions positions = reader.TermPositions(tis.Term());
+				try
+				{
+					while (positions.Next())
+					{
+						System.Console.Out.Write(" doc=" + positions.Doc());
+						System.Console.Out.Write(" TF=" + positions.Freq());
+						System.Console.Out.Write(" pos=");
+						System.Console.Out.Write(positions.NextPosition());
+						for (int j = 1; j < positions.Freq(); j++)
+							System.Console.Out.Write("," + positions.NextPosition());
+						System.Console.Out.WriteLine("");
+					}
+				}
+				finally
+				{
+					positions.Close();
+				}
+			}
+			tis.Close();
+			reader.Close();
+			directory.Close();
+		}
+	}
+}
+*/
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/MockIndexInput.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/MockIndexInput.cs Tue Jul 15 14:44:04 2008
@@ -34,7 +34,7 @@
 			length = bytes.Length;
 		}
 		
-		public override void  ReadInternal(byte[] dest, int destOffset, int len)
+		protected override void  ReadInternal(byte[] dest, int destOffset, int len)
 		{
 			int remainder = len;
 			int start = pointer;
@@ -57,7 +57,7 @@
 			// ignore
 		}
 		
-		public override void  SeekInternal(long pos)
+		protected override void  SeekInternal(long pos)
 		{
 			pointer = (int) pos;
 		}

Added: incubator/lucene.net/trunk/C#/src/Test/Index/MockInputStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/MockInputStream.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/MockInputStream.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/MockInputStream.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// {{Aroush-2.3.1}} remove this file from SVN
+/*
+using System;
+using InputStream = Lucene.Net.Store.InputStream;
+namespace Lucene.Net.Index
+{
+	
+	public class MockInputStream : InputStream
+	{
+		private byte[] buffer;
+		private int pointer = 0;
+		
+		public MockInputStream(byte[] bytes)
+		{
+			buffer = bytes;
+			length = bytes.Length;
+		}
+		
+		public override void  ReadInternal(byte[] dest, int destOffset, int len)
+		{
+			int remainder = len;
+			int start = pointer;
+			while (remainder != 0)
+			{
+				//          int bufferNumber = start / buffer.length;
+				int bufferOffset = start % buffer.Length;
+				int bytesInBuffer = buffer.Length - bufferOffset;
+				int bytesToCopy = bytesInBuffer >= remainder?remainder:bytesInBuffer;
+				Array.Copy(buffer, bufferOffset, dest, destOffset, bytesToCopy);
+				destOffset += bytesToCopy;
+				start += bytesToCopy;
+				remainder -= bytesToCopy;
+			}
+			pointer += len;
+		}
+		
+		public override void  Close()
+		{
+			// ignore
+		}
+		
+		public override void  SeekInternal(long pos)
+		{
+			pointer = (int) pos;
+		}
+	}
+}
+*/
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/Store/FSDirectoryTestCase.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/FSDirectoryTestCase.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/FSDirectoryTestCase.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/FSDirectoryTestCase.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using NUnit.Framework;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+namespace Lucene.Net.Index.Store
+{
+	[TestFixture]
+	abstract public class FSDirectoryTestCase
+	{
+		private FSDirectory directory;
+		
+		protected internal FSDirectory GetDirectory()
+		{
+			return GetDirectory(false);
+		}
+		
+		protected internal FSDirectory GetDirectory(bool create)
+		{
+			if (directory == null)
+			{
+				directory = FSDirectory.GetDirectory(SupportClass.AppSettings.Get("test.index.dir", "."), create);
+			}
+			
+			return directory;
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestFSDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/TestFSDirectory.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestFSDirectory.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestFSDirectory.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,229 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+
+namespace Lucene.Net.Index.Store
+{
+	
+	/// <summary> Test to illustrate the problem found when trying to open an IndexWriter in
+	/// a situation where the property <code>Lucene.Net.lockDir</code>
+	/// was not set and the one specified by <code>java.io.tmpdir</code> had been
+	/// set to a non-existent path. What I observed is that this combination of
+	/// conditions resulted in a <code>NullPointerException</code> being thrown in
+	/// the <code>create()</code> method in <code>FSDirectory</code>, where
+	/// <code>files.length</code> is de-referenced, but <code>files</code> is
+	/// <code>null</code>.
+	/// 
+	/// </summary>
+	/// <author>  Michael Goddard
+	/// </author>
+	
+	[TestFixture]
+	public class TestFSDirectory
+	{
+		
+		/// <summary> What happens if the Lucene lockDir doesn't exist?
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+		public virtual void  TestNonExistentTmpDir()
+		{
+			orgApacheLuceneLockDir = SupportClass.AppSettings.Get("Lucene.Net.lockDir", NON_EXISTENT_DIRECTORY); // remember the original setting so TearDown can restore it
+			//System.Configuration.ConfigurationSettings.AppSettings.Set("Lucene.Net.lockDir", NON_EXISTENT_DIRECTORY); // {{Aroush}} how do we set up an environment variable in C#?
+			System.String exceptionClassName = OpenIndexWriter();
+			if (exceptionClassName == null || exceptionClassName.Equals("java.io.IOException")) // an IOException (or no exception at all) is the acceptable outcome
+				Assert.IsTrue(true);
+			else
+				Assert.Fail("Caught an unexpected Exception");
+		}
+		
+		/// <summary> What happens if the Lucene lockDir is a regular file instead of a
+		/// directory?
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+		public virtual void  TestTmpDirIsPlainFile()
+		{
+			shouldBeADirectory = new System.IO.FileInfo(NON_EXISTENT_DIRECTORY);
+			shouldBeADirectory.Create().Close(); // create a plain file where a directory is expected
+			System.String exceptionClassName = OpenIndexWriter();
+			if (exceptionClassName == null || exceptionClassName.Equals("java.io.IOException")) // same acceptable outcomes as TestNonExistentTmpDir
+				Assert.IsTrue(true);
+			else
+				Assert.Fail("Caught an unexpected Exception");
+		}
+		
+		public static readonly System.String FILE_SEP = System.IO.Path.DirectorySeparatorChar.ToString(); // platform-specific path separator
+		
+		public static readonly System.String NON_EXISTENT_DIRECTORY = System.IO.Path.GetTempPath() + FILE_SEP + "highly_improbable_directory_name"; // path expected not to exist on any test machine
+		
+		public static readonly System.String TEST_INDEX_DIR = System.IO.Path.GetTempPath() + FILE_SEP + "temp_index"; // scratch index location, removed in TearDown
+		
+		private System.String orgApacheLuceneLockDir; // original "Lucene.Net.lockDir" value, restored in TearDown
+		
+		private System.IO.FileInfo shouldBeADirectory; // plain file created by TestTmpDirIsPlainFile, cleaned up in TearDown
+		
+		[TearDown]
+		public virtual void  TearDown()
+		{
+			if (orgApacheLuceneLockDir != null)
+			{
+				SupportClass.AppSettings.Set("Lucene.Net.lockDir", orgApacheLuceneLockDir);
+			}
+			bool tmpBool = false;
+			if ((shouldBeADirectory != null) && 
+				System.IO.File.Exists(shouldBeADirectory.FullName) && 
+				System.IO.Directory.Exists(shouldBeADirectory.FullName)) // NOTE(review): File.Exists and Directory.Exists can't both be true for one path, so tmpBool looks always false — verify intent (likely meant ||)
+			{
+				tmpBool = true;
+			}
+			if (shouldBeADirectory != null && tmpBool)
+			{
+				try
+				{
+					bool tmpBool2;
+					if (System.IO.File.Exists(shouldBeADirectory.FullName))
+					{
+						System.IO.File.Delete(shouldBeADirectory.FullName);
+						tmpBool2 = true;
+					}
+					else if (System.IO.Directory.Exists(shouldBeADirectory.FullName))
+					{
+						System.IO.Directory.Delete(shouldBeADirectory.FullName);
+						tmpBool2 = true;
+					}
+					else
+						tmpBool2 = false;
+					bool generatedAux = tmpBool2; // converter artifact: result of the Java-style delete() is unused
+				}
+				catch (System.Exception e)
+				{
+					System.Console.Error.WriteLine(e.StackTrace); // best-effort cleanup; never fail the test run here
+				}
+			}
+			System.IO.FileInfo deletableIndex = new System.IO.FileInfo(TEST_INDEX_DIR);
+			bool tmpBool3;
+			if (System.IO.File.Exists(deletableIndex.FullName))
+				tmpBool3 = true;
+			else
+				tmpBool3 = System.IO.Directory.Exists(deletableIndex.FullName);
+			if (tmpBool3)
+				try
+				{
+					RmDir(deletableIndex); // recursively remove the scratch index
+				}
+				catch (System.Exception e)
+				{
+					System.Console.Error.WriteLine(e.StackTrace); // best-effort cleanup; never fail the test run here
+				}
+		}
+		
+		/// <summary> Open an IndexWriter<br>
+		/// Catch any (expected) IOException<br>
+		/// Close the IndexWriter
+		/// </summary>
+		private static System.String OpenIndexWriter()
+		{
+			IndexWriter iw = null;
+			System.String ret = null; // null means the writer opened without error
+			try
+			{
+				iw = new IndexWriter(TEST_INDEX_DIR, new StandardAnalyzer(), true);
+			}
+			catch (System.IO.IOException e)
+			{
+				ret = e.ToString();
+				System.Console.Error.WriteLine(e.StackTrace);
+			}
+			catch (System.NullReferenceException e) // the NullPointerException case described in the class summary
+			{
+				ret = e.ToString();
+				System.Console.Error.WriteLine(e.StackTrace);
+			}
+			finally
+			{
+				if (iw != null)
+				{
+					try
+					{
+						iw.Close();
+					}
+					catch (System.IO.IOException ioe)
+					{
+						// ignore this
+					}
+				}
+			}
+			return ret;
+		}
+		
+		private static void  RmDir(System.IO.FileInfo dirName)
+		{
+			bool tmpBool;
+			if (System.IO.File.Exists(dirName.FullName))
+				tmpBool = true;
+			else
+				tmpBool = System.IO.Directory.Exists(dirName.FullName);
+			if (tmpBool)
+			{
+				if (System.IO.Directory.Exists(dirName.FullName))
+				{
+					System.IO.FileInfo[] contents = SupportClass.FileSupport.GetFiles(dirName);
+					for (int i = 0; i < contents.Length; i++)
+						RmDir(contents[i]); // depth-first: delete children before the directory itself
+					bool tmpBool2;
+					if (System.IO.File.Exists(dirName.FullName))
+					{
+						System.IO.File.Delete(dirName.FullName);
+						tmpBool2 = true;
+					}
+					else if (System.IO.Directory.Exists(dirName.FullName))
+					{
+						System.IO.Directory.Delete(dirName.FullName); // non-recursive delete is enough: contents were removed above
+						tmpBool2 = true;
+					}
+					else
+						tmpBool2 = false;
+					bool generatedAux = tmpBool2; // converter artifact: delete result unused
+				}
+				else
+				{
+					bool tmpBool3;
+					if (System.IO.File.Exists(dirName.FullName))
+					{
+						System.IO.File.Delete(dirName.FullName);
+						tmpBool3 = true;
+					}
+					else if (System.IO.Directory.Exists(dirName.FullName))
+					{
+						System.IO.Directory.Delete(dirName.FullName);
+						tmpBool3 = true;
+					}
+					else
+						tmpBool3 = false;
+					bool generatedAux2 = tmpBool3; // converter artifact: delete result unused
+				}
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/TestRAMDirectory.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs Tue Jul 15 14:44:04 2008
@@ -19,17 +19,18 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
-using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using English = Lucene.Net.Util.English;
-using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index.Store
 {
@@ -44,64 +45,65 @@
 	/// <version>  $Id: RAMDirectory.java 150537 2004-09-28 22:45:26 +0200 (Di, 28 Sep 2004) cutting $
 	/// </version>
 	[TestFixture]
-    public class TestRAMDirectory
+	public class TestRAMDirectory : LuceneTestCase
 	{
-        private class AnonymousClassThread : SupportClass.ThreadClass
-        {
-            public AnonymousClassThread(int num, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.MockRAMDirectory ramDir, TestRAMDirectory enclosingInstance)
-            {
-                InitBlock(num, writer, ramDir, enclosingInstance);
-            }
-            private void  InitBlock(int num, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.MockRAMDirectory ramDir, TestRAMDirectory enclosingInstance)
-            {
-                this.num = num;
-                this.writer = writer;
-                this.ramDir = ramDir;
-                this.enclosingInstance = enclosingInstance;
-            }
-            private int num;
-            private Lucene.Net.Index.IndexWriter writer;
-            private Lucene.Net.Store.MockRAMDirectory ramDir;
-            private TestRAMDirectory enclosingInstance;
-            public TestRAMDirectory Enclosing_Instance
-            {
-                get
-                {
-                    return enclosingInstance;
-                }
+		private class AnonymousClassThread : SupportClass.ThreadClass
+		{
+			public AnonymousClassThread(int num, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.MockRAMDirectory ramDir, TestRAMDirectory enclosingInstance)
+			{
+				InitBlock(num, writer, ramDir, enclosingInstance);
+			}
+			private void  InitBlock(int num, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.MockRAMDirectory ramDir, TestRAMDirectory enclosingInstance)
+			{
+				this.num = num;
+				this.writer = writer;
+				this.ramDir = ramDir;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private int num;
+			private Lucene.Net.Index.IndexWriter writer;
+			private Lucene.Net.Store.MockRAMDirectory ramDir;
+			private TestRAMDirectory enclosingInstance;
+			public TestRAMDirectory Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
 				
-            }
-            override public void  Run()
-            {
-                for (int j = 1; j < Enclosing_Instance.docsPerThread; j++)
-                {
-                    Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-                    doc.Add(new Field("sizeContent", English.IntToEnglish(num * Enclosing_Instance.docsPerThread + j).Trim(), Field.Store.YES, Field.Index.UN_TOKENIZED));
-                    try
-                    {
-                        writer.AddDocument(doc);
-                    }
-                    catch (System.IO.IOException e)
-                    {
-                        throw new System.SystemException("", e);
-                    }
-                    lock (ramDir)
-                    {
-                        Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
-                    }
-                }
-            }
-        }
+			}
+			override public void  Run()
+			{
+				for (int j = 1; j < Enclosing_Instance.docsPerThread; j++)
+				{
+					Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+					doc.Add(new Field("sizeContent", English.IntToEnglish(num * Enclosing_Instance.docsPerThread + j).Trim(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+					try
+					{
+						writer.AddDocument(doc);
+					}
+					catch (System.IO.IOException e)
+					{
+						throw new System.SystemException("", e);
+					}
+					lock (ramDir)
+					{
+						Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+					}
+				}
+			}
+		}
 		
-        private System.IO.FileInfo indexDir = null;
+		private System.IO.FileInfo indexDir = null;
 		
 		// add enough document so that the index will be larger than RAMDirectory.READ_BUFFER_SIZE
 		private int docsToAdd = 500;
 		
 		// setup the index
-        [SetUp]
-		public virtual void  SetUp()
+		[SetUp]
+		public override void  SetUp()
 		{
+			base.SetUp();
 			System.String tempDir = System.IO.Path.GetTempPath();
 			if (tempDir == null)
 				throw new System.IO.IOException("java.io.tmpdir undefined, cannot run test");
@@ -121,7 +123,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestRAMDirectory_Renamed_Method()
+		public virtual void  TestRAMDirectory_Renamed_Method()
 		{
 			
 			Directory dir = FSDirectory.GetDirectory(indexDir);
@@ -130,10 +132,10 @@
 			// close the underlaying directory and delete the index
 			dir.Close();
 			
-            // Check size
-            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			// Check size
+			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
 			
-            // open reader to test document count
+			// open reader to test document count
 			IndexReader reader = IndexReader.Open(ramDir);
 			Assert.AreEqual(docsToAdd, reader.NumDocs());
 			
@@ -152,16 +154,16 @@
 			searcher.Close();
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestRAMDirectoryFile()
 		{
 			
 			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir);
 			
-            // Check size
-            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			// Check size
+			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
 			
-            // open reader to test document count
+			// open reader to test document count
 			IndexReader reader = IndexReader.Open(ramDir);
 			Assert.AreEqual(docsToAdd, reader.NumDocs());
 			
@@ -181,15 +183,15 @@
 		}
 		
 		[Test]
-        public virtual void  TestRAMDirectoryString()
+		public virtual void  TestRAMDirectoryString()
 		{
 			
 			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
 			
-            // Check size
-            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			// Check size
+			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
 			
-            // open reader to test document count
+			// open reader to test document count
 			IndexReader reader = IndexReader.Open(ramDir);
 			Assert.AreEqual(docsToAdd, reader.NumDocs());
 			
@@ -208,53 +210,54 @@
 			searcher.Close();
 		}
 		
-        private int numThreads = 50;
-        private int docsPerThread = 40;
+		private int numThreads = 50;
+		private int docsPerThread = 40;
 		
-        [Test]
-        public virtual void  TestRAMDirectorySize()
-        {
-			
-            MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
-            IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false);
-            writer.Optimize();
-			
-            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
-			
-            SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[numThreads];
-            for (int i = 0; i < numThreads; i++)
-            {
-                int num = i;
-                threads[i] = new AnonymousClassThread(num, writer, ramDir, this);
-            }
-            for (int i = 0; i < numThreads; i++)
-                threads[i].Start();
-            for (int i = 0; i < numThreads; i++)
-                threads[i].Join();
+		[Test]
+		public virtual void  TestRAMDirectorySize()
+		{
+			
+			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
+			IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false);
+			writer.Optimize();
 			
-            writer.Optimize();
-            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
 			
-            writer.Close();
-        }
+			SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[numThreads];
+			for (int i = 0; i < numThreads; i++)
+			{
+				int num = i;
+				threads[i] = new AnonymousClassThread(num, writer, ramDir, this);
+			}
+			for (int i = 0; i < numThreads; i++)
+				threads[i].Start();
+			for (int i = 0; i < numThreads; i++)
+				threads[i].Join();
+			
+			writer.Optimize();
+			Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			
+			writer.Close();
+		}
 		
 		[Test]
-        public virtual void  TestSerializable()
-        {
-            Directory dir = new RAMDirectory();
-            System.IO.MemoryStream bos = new System.IO.MemoryStream(1024);
-            Assert.AreEqual(0, bos.Length, "initially empty");
-            System.IO.BinaryWriter out_Renamed = new System.IO.BinaryWriter(bos);
-            long headerSize = bos.Length;
-            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
-            formatter.Serialize(out_Renamed.BaseStream, dir);
-            // out_Renamed.Close();
-            Assert.IsTrue(headerSize < bos.Length, "contains more then just header");
-        }
+		public virtual void  TestSerializable()
+		{
+			Directory dir = new RAMDirectory();
+			System.IO.MemoryStream bos = new System.IO.MemoryStream(1024);
+			Assert.AreEqual(0, bos.Length, "initially empty");
+			System.IO.BinaryWriter out_Renamed = new System.IO.BinaryWriter(bos);
+			long headerSize = bos.Length;
+			System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
+			formatter.Serialize(out_Renamed.BaseStream, dir);
+			// out_Renamed.Close();
+			Assert.IsTrue(headerSize < bos.Length, "contains more then just header");
+		}
 		
-        [TearDown]
-		public virtual void  TearDown()
+		[TearDown]
+		public override void TearDown()
 		{
+			base.TearDown();
 			// cleanup 
 			bool tmpBool;
 			if (System.IO.File.Exists(indexDir.FullName))

Added: incubator/lucene.net/trunk/C#/src/Test/Index/Store/_delete_TestFSDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/_delete_TestFSDirectory.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/_delete_TestFSDirectory.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/_delete_TestFSDirectory.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,234 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// {{Aroush-2.3.1}} Remove from 2.3.1
+
+/*
+using System;
+using NUnit.Framework;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+
+namespace _delete_Lucene.Net.Index.Store
+{
+#if DELETE_ME
+	/// <summary> Test to illustrate the problem found when trying to open an IndexWriter in
+	/// a situation where the property <code>Lucene.Net.lockDir</code>
+	/// was not set and the one specified by <code>java.io.tmpdir</code> had been
+	/// set to a non-existent path. What I observed is that this combination of
+	/// conditions resulted in a <code>NullPointerException</code> being thrown in
+	/// the <code>create()</code> method in <code>FSDirectory</code>, where
+	/// <code>files.length</code> is de-referenced, but <code>files</code> is
+	/// <code>null</code>.
+	/// 
+	/// </summary>
+	/// <author>  Michael Goddard
+	/// </author>
+	
+    [TestFixture]
+	public class TestFSDirectory
+	{
+		
+		/// <summary> What happens if the Lucene lockDir doesn't exist?
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+        public virtual void  TestNonExistentTmpDir()
+		{
+            orgApacheLuceneLockDir = System.Configuration.ConfigurationSettings.AppSettings.Get("Lucene.Net.lockDir");
+			//System.Configuration.ConfigurationSettings.AppSettings.Set("Lucene.Net.lockDir", NON_EXISTENT_DIRECTORY); // {{Aroush}} how do we set up an environment variable in C#?
+			System.String exceptionClassName = OpenIndexWriter();
+			if (exceptionClassName == null || exceptionClassName.Equals("java.io.IOException"))
+				NUnit.Framework.Assert.IsTrue(true);
+			else
+				NUnit.Framework.Assert.Fail("Caught an unexpected Exception");
+		}
+		
+		/// <summary> What happens if the Lucene lockDir is a regular file instead of a
+		/// directory?
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		[Test]
+        public virtual void  TestTmpDirIsPlainFile()
+		{
+			shouldBeADirectory = new System.IO.FileInfo(NON_EXISTENT_DIRECTORY);
+            shouldBeADirectory.Create().Close();
+            System.String exceptionClassName = OpenIndexWriter();
+			if (exceptionClassName == null || exceptionClassName.Equals("java.io.IOException"))
+				NUnit.Framework.Assert.IsTrue(true);
+			else
+				NUnit.Framework.Assert.Fail("Caught an unexpected Exception");
+		}
+		
+		public static readonly System.String FILE_SEP = System.IO.Path.DirectorySeparatorChar.ToString();
+		
+		public static readonly System.String NON_EXISTENT_DIRECTORY = System.IO.Path.GetTempPath() + FILE_SEP + "highly_improbable_directory_name";
+		
+		public static readonly System.String TEST_INDEX_DIR = System.IO.Path.GetTempPath() + FILE_SEP + "temp_index";
+		
+		private System.String orgApacheLuceneLockDir;
+		
+		private System.IO.FileInfo shouldBeADirectory;
+		
+        [TearDown]
+		public virtual void  TearDown()
+		{
+			if (orgApacheLuceneLockDir != null)
+			{
+				System.Configuration.ConfigurationSettings.AppSettings.Set("Lucene.Net.lockDir", orgApacheLuceneLockDir);
+			}
+            bool tmpBool = false;
+            if ((shouldBeADirectory != null) && 
+                System.IO.File.Exists(shouldBeADirectory.FullName) && 
+                System.IO.Directory.Exists(shouldBeADirectory.FullName))
+            {
+                tmpBool = true;
+            }
+            if (shouldBeADirectory != null && tmpBool)
+			{
+				try
+				{
+					bool tmpBool2;
+					if (System.IO.File.Exists(shouldBeADirectory.FullName))
+					{
+						System.IO.File.Delete(shouldBeADirectory.FullName);
+						tmpBool2 = true;
+					}
+					else if (System.IO.Directory.Exists(shouldBeADirectory.FullName))
+					{
+						System.IO.Directory.Delete(shouldBeADirectory.FullName);
+						tmpBool2 = true;
+					}
+					else
+						tmpBool2 = false;
+					bool generatedAux = tmpBool2;
+				}
+				catch (System.Exception e)
+				{
+                    System.Console.Error.WriteLine(e.StackTrace);
+				}
+			}
+			System.IO.FileInfo deletableIndex = new System.IO.FileInfo(TEST_INDEX_DIR);
+			bool tmpBool3;
+			if (System.IO.File.Exists(deletableIndex.FullName))
+				tmpBool3 = true;
+			else
+				tmpBool3 = System.IO.Directory.Exists(deletableIndex.FullName);
+			if (tmpBool3)
+				try
+				{
+					RmDir(deletableIndex);
+				}
+				catch (System.Exception e)
+				{
+					System.Console.Error.WriteLine(e.StackTrace);
+				}
+		}
+		
+		/// <summary> Open an IndexWriter<br>
+		/// Catch any (expected) IOException<br>
+		/// Close the IndexWriter
+		/// </summary>
+		private static System.String OpenIndexWriter()
+		{
+			IndexWriter iw = null;
+			System.String ret = null;
+			try
+			{
+				iw = new IndexWriter(TEST_INDEX_DIR, new StandardAnalyzer(), true);
+			}
+			catch (System.IO.IOException e)
+			{
+				ret = e.ToString();
+				System.Console.Error.WriteLine(e.StackTrace);
+			}
+			catch (System.NullReferenceException e)
+			{
+				ret = e.ToString();
+				System.Console.Error.WriteLine(e.StackTrace);
+			}
+			finally
+			{
+				if (iw != null)
+				{
+					try
+					{
+						iw.Close();
+					}
+					catch (System.IO.IOException ioe)
+					{
+						// ignore this
+					}
+				}
+			}
+			return ret;
+		}
+		
+		private static void  RmDir(System.IO.FileInfo dirName)
+		{
+			bool tmpBool;
+			if (System.IO.File.Exists(dirName.FullName))
+				tmpBool = true;
+			else
+				tmpBool = System.IO.Directory.Exists(dirName.FullName);
+			if (tmpBool)
+			{
+				if (System.IO.Directory.Exists(dirName.FullName))
+				{
+					System.IO.FileInfo[] contents = SupportClass.FileSupport.GetFiles(dirName);
+					for (int i = 0; i < contents.Length; i++)
+						RmDir(contents[i]);
+					bool tmpBool2;
+					if (System.IO.File.Exists(dirName.FullName))
+					{
+						System.IO.File.Delete(dirName.FullName);
+						tmpBool2 = true;
+					}
+					else if (System.IO.Directory.Exists(dirName.FullName))
+					{
+						System.IO.Directory.Delete(dirName.FullName);
+						tmpBool2 = true;
+					}
+					else
+						tmpBool2 = false;
+					bool generatedAux = tmpBool2;
+				}
+				else
+				{
+					bool tmpBool3;
+					if (System.IO.File.Exists(dirName.FullName))
+					{
+						System.IO.File.Delete(dirName.FullName);
+						tmpBool3 = true;
+					}
+					else if (System.IO.Directory.Exists(dirName.FullName))
+					{
+						System.IO.Directory.Delete(dirName.FullName);
+						tmpBool3 = true;
+					}
+					else
+						tmpBool3 = false;
+					bool generatedAux2 = tmpBool3;
+				}
+			}
+		}
+	}
+#endif
+}
+*/
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TermInfosTest.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TermInfosTest.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TermInfosTest.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TermInfosTest.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// {{Aroush-2.3.1}} remove this file from SVN
+/*
+using System;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+namespace Lucene.Net.Index
+{
+	class TermInfosTest
+	{
+		[STAThread]
+		public static void  Main(System.String[] args)
+		{
+			try
+			{
+				Test();
+			}
+			catch (System.Exception e)
+			{
+				System.Console.Out.WriteLine(" caught a " + e.GetType() + "\n with message: " + e.Message);
+			}
+		}
+		
+		// FIXME: OG: remove hard-coded file names
+		public static void  Test()
+		{
+			
+			System.IO.FileInfo file = new System.IO.FileInfo("words.txt");
+			System.Console.Out.WriteLine(" reading word file containing " + file.Length + " bytes");
+			
+			System.DateTime start = System.DateTime.Now;
+			
+			System.Collections.ArrayList keys = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
+			System.IO.FileStream ws = new System.IO.FileStream(file.FullName, System.IO.FileMode.Open, System.IO.FileAccess.Read);
+			System.IO.StreamReader wr = new System.IO.StreamReader(new System.IO.StreamReader(ws, System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(ws, System.Text.Encoding.Default).CurrentEncoding);
+			
+			for (System.String key = wr.ReadLine(); key != null; key = wr.ReadLine())
+				keys.Add(new Term("word", key));
+			wr.Close();
+			
+			System.DateTime end = System.DateTime.Now;
+			
+			System.Console.Out.Write(end.Ticks - start.Ticks);
+			System.Console.Out.WriteLine(" milliseconds to read " + keys.Count + " words");
+			
+			start = System.DateTime.Now;
+			
+			System.Random gen = new System.Random((System.Int32) 1251971);
+			long fp = (gen.Next() & 0xF) + 1;
+			long pp = (gen.Next() & 0xF) + 1;
+			int[] docFreqs = new int[keys.Count];
+			long[] freqPointers = new long[keys.Count];
+			long[] proxPointers = new long[keys.Count];
+			for (int i = 0; i < keys.Count; i++)
+			{
+				docFreqs[i] = (gen.Next() & 0xF) + 1;
+				freqPointers[i] = fp;
+				proxPointers[i] = pp;
+				fp += (gen.Next() & 0xF) + 1;
+				;
+				pp += (gen.Next() & 0xF) + 1;
+				;
+			}
+			
+			end = System.DateTime.Now;
+			
+			System.Console.Out.Write(end.Ticks - start.Ticks);
+			System.Console.Out.WriteLine(" milliseconds to generate values");
+			
+			start = System.DateTime.Now;
+			
+			Directory store = FSDirectory.GetDirectory("test.store", true);
+			FieldInfos fis = new FieldInfos();
+			
+			TermInfosWriter writer = new TermInfosWriter(store, "words", fis);
+			fis.Add("word", false);
+			
+			for (int i = 0; i < keys.Count; i++)
+				writer.Add((Term) keys[i], new TermInfo(docFreqs[i], freqPointers[i], proxPointers[i]));
+			
+			writer.Close();
+			
+			end = System.DateTime.Now;
+			
+			System.Console.Out.Write(end.Ticks - start.Ticks);
+			System.Console.Out.WriteLine(" milliseconds to write table");
+			
+			System.Console.Out.WriteLine(" table occupies " + store.FileLength("words.tis") + " bytes");
+			
+			start = System.DateTime.Now;
+			
+			TermInfosReader reader = new TermInfosReader(store, "words", fis);
+			
+			end = System.DateTime.Now;
+			
+			System.Console.Out.Write(end.Ticks - start.Ticks);
+			System.Console.Out.WriteLine(" milliseconds to open table");
+			
+			start = System.DateTime.Now;
+			
+			SegmentTermEnum enumerator = reader.Terms();
+			for (int i = 0; i < keys.Count; i++)
+			{
+				enumerator.Next();
+				Term key = (Term) keys[i];
+				if (!key.Equals(enumerator.Term()))
+				{
+					throw new System.Exception("wrong term: " + enumerator.Term() + ", expected: " + key + " at " + i);
+				}
+				TermInfo ti = enumerator.TermInfo();
+				if (ti.docFreq != docFreqs[i])
+					throw new System.Exception("wrong value: " + System.Convert.ToString(ti.docFreq, 16) + ", expected: " + System.Convert.ToString(docFreqs[i], 16) + " at " + i);
+				if (ti.freqPointer != freqPointers[i])
+					throw new System.Exception("wrong value: " + System.Convert.ToString(ti.freqPointer, 16) + ", expected: " + System.Convert.ToString(freqPointers[i], 16) + " at " + i);
+				if (ti.proxPointer != proxPointers[i])
+					throw new System.Exception("wrong value: " + System.Convert.ToString(ti.proxPointer, 16) + ", expected: " + System.Convert.ToString(proxPointers[i], 16) + " at " + i);
+			}
+			
+			end = System.DateTime.Now;
+			
+			System.Console.Out.Write(end.Ticks - start.Ticks);
+			System.Console.Out.WriteLine(" milliseconds to iterate over " + keys.Count + " words");
+			
+			start = System.DateTime.Now;
+			
+			for (int i = 0; i < keys.Count; i++)
+			{
+				Term key = (Term) keys[i];
+				TermInfo ti = reader.Get(key);
+				if (ti.docFreq != docFreqs[i])
+					throw new System.Exception("wrong value: " + System.Convert.ToString(ti.docFreq, 16) + ", expected: " + System.Convert.ToString(docFreqs[i], 16) + " at " + i);
+				if (ti.freqPointer != freqPointers[i])
+					throw new System.Exception("wrong value: " + System.Convert.ToString(ti.freqPointer, 16) + ", expected: " + System.Convert.ToString(freqPointers[i], 16) + " at " + i);
+				if (ti.proxPointer != proxPointers[i])
+					throw new System.Exception("wrong value: " + System.Convert.ToString(ti.proxPointer, 16) + ", expected: " + System.Convert.ToString(proxPointers[i], 16) + " at " + i);
+			}
+			
+			end = System.DateTime.Now;
+			
+			System.Console.Out.Write((end.Ticks - start.Ticks) / (float) keys.Count);
+			System.Console.Out.WriteLine(" average milliseconds per lookup");
+			
+			TermEnum e = reader.Terms(new Term("word", "azz"));
+			System.Console.Out.WriteLine("Word after azz is " + e.Term().text);
+			
+			reader.Close();
+			
+			store.Close();
+		}
+	}
+}
+*/
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestAddIndexesNoOptimize.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs Tue Jul 15 14:44:04 2008
@@ -19,19 +19,20 @@
 
 using NUnit.Framework;
 
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 
 namespace Lucene.Net.Index
 {
 	
-    [TestFixture]
-    public class TestAddIndexesNoOptimize
+	[TestFixture]
+	public class TestAddIndexesNoOptimize : LuceneTestCase
 	{
-        [Test]
+		[Test]
 		public virtual void  TestSimpleCase()
 		{
 			// main directory
@@ -126,7 +127,7 @@
 		}
 		
 		// case 0: add self or exceed maxMergeDocs, expect exception
-        [Test]
+		[Test]
 		public virtual void  TestAddSelf()
 		{
 			// main directory
@@ -164,9 +165,10 @@
 				writer.AddIndexesNoOptimize(new Directory[]{aux});
 				Assert.IsTrue(false);
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				Assert.AreEqual(100, writer.DocCount());
+				Assert.AreEqual(1, writer.GetSegmentCount());
 			}
 			
 			writer.SetMaxMergeDocs(maxMergeDocs);
@@ -176,7 +178,7 @@
 				writer.AddIndexesNoOptimize(new Directory[]{aux, dir});
 				Assert.IsTrue(false);
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				Assert.AreEqual(100, writer.DocCount());
 			}
@@ -189,7 +191,7 @@
 		// in all the remaining tests, make the doc count of the oldest segment
 		// in dir large so that it is never merged in addIndexesNoOptimize()
 		// case 1: no tail segments
-        [Test]
+		[Test]
 		public virtual void  TestNoTailSegments()
 		{
 			// main directory
@@ -215,7 +217,7 @@
 		}
 		
 		// case 2: tail segments, invariants hold, no copy
-        [Test]
+		[Test]
 		public virtual void  TestNoCopySegments()
 		{
 			// main directory
@@ -241,7 +243,7 @@
 		}
 		
 		// case 3: tail segments, invariants hold, copy, invariants hold
-        [Test]
+		[Test]
 		public virtual void  TestNoMergeAfterCopy()
 		{
 			// main directory
@@ -250,12 +252,11 @@
 			Directory aux = new RAMDirectory();
 			
 			SetUpDirs(dir, aux);
-			
+
 			IndexWriter writer = NewWriter(dir, false);
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(4);
-			
-			writer.AddIndexesNoOptimize(new Directory[]{aux, aux});
+			writer.AddIndexesNoOptimize(new Directory[] { aux, aux });
 			Assert.AreEqual(1060, writer.DocCount());
 			Assert.AreEqual(1000, writer.GetDocCount(0));
 			writer.Close();
@@ -265,7 +266,7 @@
 		}
 		
 		// case 4: tail segments, invariants hold, copy, invariants not hold
-        [Test]
+		[Test]
 		public virtual void  TestMergeAfterCopy()
 		{
 			// main directory
@@ -289,7 +290,6 @@
 			
 			writer.AddIndexesNoOptimize(new Directory[]{aux, aux});
 			Assert.AreEqual(1020, writer.DocCount());
-			Assert.AreEqual(2, writer.GetSegmentCount());
 			Assert.AreEqual(1000, writer.GetDocCount(0));
 			writer.Close();
 			
@@ -298,7 +298,7 @@
 		}
 		
 		// case 5: tail segments, invariants not hold
-        [Test]
+		[Test]
 		public virtual void  TestMoreMerges()
 		{
 			// main directory
@@ -348,7 +348,9 @@
 		
 		private IndexWriter NewWriter(Directory dir, bool create)
 		{
-			return new IndexWriter(dir, new WhitespaceAnalyzer(), create);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), create);
+			writer.SetMergePolicy(new LogDocMergePolicy());
+			return writer;
 		}
 		
 		private void  AddDocs(IndexWriter writer, int numDocs)
@@ -396,7 +398,7 @@
 			
 			writer = NewWriter(dir, true);
 			writer.SetMaxBufferedDocs(1000);
-			// add 1000 documents
+			// add 1000 documents in 1 segment
 			AddDocs(writer, 1000);
 			Assert.AreEqual(1000, writer.DocCount());
 			Assert.AreEqual(1, writer.GetSegmentCount());

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestAtomicUpdate.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestAtomicUpdate.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.QueryParsers;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using Lucene.Net.Analysis;
+using Lucene.Net.Search;
+using Searchable = Lucene.Net.Search.Searchable;
+
+namespace Lucene.Net.Index
+{
+	
+	[TestFixture]
+	public class TestAtomicUpdate : LuceneTestCase
+	{
+		private static readonly Analyzer ANALYZER = new SimpleAnalyzer();
+		private static readonly System.Random RANDOM = new System.Random();
+		
+		abstract public class TimedThread : SupportClass.ThreadClass
+		{
+			internal bool failed;
+			internal int count;
+			private static int RUN_TIME_SEC = 3;
+			private TimedThread[] allThreads;
+			
+			abstract public void  DoWork();
+			
+			internal TimedThread(TimedThread[] threads)
+			{
+				this.allThreads = threads;
+			}
+			
+			override public void  Run()
+			{
+				long stopTime = (System.DateTime.Now.Ticks - 621355968000000000) / 10000 + 1000 * RUN_TIME_SEC;
+				
+				count = 0;
+				
+				try
+				{
+					while ((System.DateTime.Now.Ticks - 621355968000000000) / 10000 < stopTime && !AnyErrors())
+					{
+						DoWork();
+						count++;
+					}
+				}
+				catch (System.Exception e)
+				{
+					System.Console.Out.WriteLine(e.StackTrace);
+					failed = true;
+				}
+			}
+			
+			private bool AnyErrors()
+			{
+				for (int i = 0; i < allThreads.Length; i++)
+					if (allThreads[i] != null && allThreads[i].failed)
+						return true;
+				return false;
+			}
+		}
+		
+		private class IndexerThread : TimedThread
+		{
+			internal IndexWriter writer;
+			//new public int count;
+			
+			public IndexerThread(IndexWriter writer, TimedThread[] threads):base(threads)
+			{
+				this.writer = writer;
+			}
+			
+			public override void  DoWork()
+			{
+				// Update all 100 docs...
+				for (int i = 0; i < 100; i++)
+				{
+					Document d = new Document();
+					int n = Lucene.Net.Index.TestAtomicUpdate.RANDOM.Next();
+					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+					d.Add(new Field("contents", English.IntToEnglish(i + 10 * count), Field.Store.NO, Field.Index.TOKENIZED));
+					writer.UpdateDocument(new Term("id", System.Convert.ToString(i)), d);
+				}
+			}
+		}
+		
+		private class SearcherThread : TimedThread
+		{
+			private Directory directory;
+			
+			public SearcherThread(Directory directory, TimedThread[] threads):base(threads)
+			{
+				this.directory = directory;
+			}
+			
+			public override void  DoWork()
+			{
+				IndexReader r = IndexReader.Open(directory);
+				try
+				{
+					Assert.AreEqual(100, r.NumDocs());
+				}
+				catch (System.Exception t)
+				{
+					throw t;
+				}
+				r.Close();
+			}
+		}
+		
+		/*
+		Run one indexer and 2 searchers against single index as
+		stress test.
+		*/
+		public virtual void  RunTest(Directory directory)
+		{
+			
+			TimedThread[] threads = new TimedThread[4];
+			
+			IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+			
+			// Establish a base index of 100 docs:
+			for (int i = 0; i < 100; i++)
+			{
+				Document d = new Document();
+				d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+				d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
+				writer.AddDocument(d);
+			}
+			writer.Flush();
+			
+			IndexerThread indexerThread = new IndexerThread(writer, threads);
+			threads[0] = indexerThread;
+			indexerThread.Start();
+			
+			IndexerThread indexerThread2 = new IndexerThread(writer, threads);
+			threads[1] = indexerThread2;
+			indexerThread2.Start();
+			
+			SearcherThread searcherThread1 = new SearcherThread(directory, threads);
+			threads[2] = searcherThread1;
+			searcherThread1.Start();
+			
+			SearcherThread searcherThread2 = new SearcherThread(directory, threads);
+			threads[3] = searcherThread2;
+			searcherThread2.Start();
+			
+			indexerThread.Join();
+			indexerThread2.Join();
+			searcherThread1.Join();
+			searcherThread2.Join();
+			
+			writer.Close();
+			
+			Assert.IsTrue(!indexerThread.failed, "hit unexpected exception in indexer");
+			Assert.IsTrue(!indexerThread2.failed, "hit unexpected exception in indexer2");
+			Assert.IsTrue(!searcherThread1.failed, "hit unexpected exception in search1");
+			Assert.IsTrue(!searcherThread2.failed, "hit unexpected exception in search2");
+			//System.out.println("    Writer: " + indexerThread.count + " iterations");
+			//System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
+			//System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
+		}
+		
+		/*
+		Run above stress test against RAMDirectory and then
+		FSDirectory.
+		*/
+		[Test]
+		public virtual void  TestAtomicUpdates()
+		{
+			
+			Directory directory;
+			
+			// First in a RAM directory:
+			directory = new MockRAMDirectory();
+			RunTest(directory);
+			directory.Close();
+			
+			// Second in an FSDirectory:
+			System.String tempDir = System.IO.Path.GetTempPath();
+			System.IO.FileInfo dirPath = new System.IO.FileInfo(tempDir + "\\" + "lucene.test.atomic");
+			directory = FSDirectory.GetDirectory(dirPath);
+			RunTest(directory);
+			directory.Close();
+			_TestUtil.RmDir(dirPath);
+		}
+	}
+}
\ No newline at end of file



Mime
View raw message