lucenenet-commits mailing list archives

From: aro...@apache.org
Subject: svn commit: r564939 [2/8] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/Standard/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Q...
Date: Sat, 11 Aug 2007 16:56:44 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/Store/TestRAMDirectory.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/Store/TestRAMDirectory.cs Sat Aug 11 09:56:37 2007
@@ -16,7 +16,9 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
@@ -27,6 +29,7 @@
 using FSDirectory = Lucene.Net.Store.FSDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using English = Lucene.Net.Util.English;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
 
 namespace Lucene.Net.Index.Store
 {
@@ -43,8 +46,54 @@
 	[TestFixture]
     public class TestRAMDirectory
 	{
+        private class AnonymousClassThread : SupportClass.ThreadClass
+        {
+            public AnonymousClassThread(int num, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.MockRAMDirectory ramDir, TestRAMDirectory enclosingInstance)
+            {
+                InitBlock(num, writer, ramDir, enclosingInstance);
+            }
+            private void  InitBlock(int num, Lucene.Net.Index.IndexWriter writer, Lucene.Net.Store.MockRAMDirectory ramDir, TestRAMDirectory enclosingInstance)
+            {
+                this.num = num;
+                this.writer = writer;
+                this.ramDir = ramDir;
+                this.enclosingInstance = enclosingInstance;
+            }
+            private int num;
+            private Lucene.Net.Index.IndexWriter writer;
+            private Lucene.Net.Store.MockRAMDirectory ramDir;
+            private TestRAMDirectory enclosingInstance;
+            public TestRAMDirectory Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+				
+            }
+            override public void  Run()
+            {
+                for (int j = 1; j < Enclosing_Instance.docsPerThread; j++)
+                {
+                    Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+                    doc.Add(new Field("sizeContent", English.IntToEnglish(num * Enclosing_Instance.docsPerThread + j).Trim(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+                    try
+                    {
+                        writer.AddDocument(doc);
+                    }
+                    catch (System.IO.IOException e)
+                    {
+                        throw new System.SystemException("", e);
+                    }
+                    lock (ramDir)
+                    {
+                        Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+                    }
+                }
+            }
+        }
 		
-		private System.IO.FileInfo indexDir = null;
+        private System.IO.FileInfo indexDir = null;
 		
 		// add enough documents so that the index will be larger than RAMDirectory.READ_BUFFER_SIZE
 		private int docsToAdd = 500;
@@ -68,7 +117,6 @@
 				writer.AddDocument(doc);
 			}
 			Assert.AreEqual(docsToAdd, writer.DocCount());
-			writer.Optimize();
 			writer.Close();
 		}
 		
@@ -76,13 +124,16 @@
         public virtual void  TestRAMDirectory_Renamed_Method()
 		{
 			
-			Directory dir = FSDirectory.GetDirectory(indexDir, false);
-			RAMDirectory ramDir = new RAMDirectory(dir);
+			Directory dir = FSDirectory.GetDirectory(indexDir);
+			MockRAMDirectory ramDir = new MockRAMDirectory(dir);
 			
 			// close the underlying directory and delete the index
 			dir.Close();
 			
-			// open reader to test document count
+            // Check size
+            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			
+            // open reader to test document count
 			IndexReader reader = IndexReader.Open(ramDir);
 			Assert.AreEqual(docsToAdd, reader.NumDocs());
 			
@@ -105,9 +156,12 @@
 		public virtual void  TestRAMDirectoryFile()
 		{
 			
-			RAMDirectory ramDir = new RAMDirectory(indexDir);
+			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir);
+			
+            // Check size
+            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
 			
-			// open reader to test document count
+            // open reader to test document count
 			IndexReader reader = IndexReader.Open(ramDir);
 			Assert.AreEqual(docsToAdd, reader.NumDocs());
 			
@@ -130,9 +184,12 @@
         public virtual void  TestRAMDirectoryString()
 		{
 			
-			RAMDirectory ramDir = new RAMDirectory(indexDir.FullName);
+			MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
 			
-			// open reader to test document count
+            // Check size
+            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			
+            // open reader to test document count
 			IndexReader reader = IndexReader.Open(ramDir);
 			Assert.AreEqual(docsToAdd, reader.NumDocs());
 			
@@ -150,6 +207,50 @@
 			reader.Close();
 			searcher.Close();
 		}
+		
+        private int numThreads = 50;
+        private int docsPerThread = 40;
+		
+        [Test]
+        public virtual void  TestRAMDirectorySize()
+        {
+			
+            MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.FullName);
+            IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false);
+            writer.Optimize();
+			
+            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			
+            SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[numThreads];
+            for (int i = 0; i < numThreads; i++)
+            {
+                int num = i;
+                threads[i] = new AnonymousClassThread(num, writer, ramDir, this);
+            }
+            for (int i = 0; i < numThreads; i++)
+                threads[i].Start();
+            for (int i = 0; i < numThreads; i++)
+                threads[i].Join();
+			
+            writer.Optimize();
+            Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
+			
+            writer.Close();
+        }
+		
+		[Test]
+        public virtual void  TestSerializable()
+        {
+            Directory dir = new RAMDirectory();
+            System.IO.MemoryStream bos = new System.IO.MemoryStream(1024);
+            Assert.AreEqual(0, bos.Length, "initially empty");
+            System.IO.BinaryWriter out_Renamed = new System.IO.BinaryWriter(bos);
+            long headerSize = bos.Length;
+            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
+            formatter.Serialize(out_Renamed.BaseStream, dir);
+            out_Renamed.Close();
+            Assert.IsTrue(headerSize < bos.Length, "contains more than just the header");
+        }
 		
         [TearDown]
 		public virtual void  TearDown()

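The assertions added throughout this file compare RAMDirectory.SizeInBytes(), which the directory maintains incrementally as files are written, against MockRAMDirectory.GetRecomputedSizeInBytes(), which re-derives the total from the individual RAM files. A minimal sketch of that invariant check, assuming only the types used in this commit (the lock mirrors the threaded test above, where the writer may still be appending):

    MockRAMDirectory ramDir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true);
    // ... documents may be added here, possibly from several threads ...
    lock (ramDir)
    {
        // the incremental byte count must always match the recomputed total
        Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
    }
    writer.Close();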
Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestAddIndexesNoOptimize.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestAddIndexesNoOptimize.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,424 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Index
+{
+	
+    [TestFixture]
+    public class TestAddIndexesNoOptimize
+	{
+        [Test]
+		public virtual void  TestSimpleCase()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// two auxiliary directories
+			Directory aux = new RAMDirectory();
+			Directory aux2 = new RAMDirectory();
+			
+			IndexWriter writer = null;
+			
+			writer = NewWriter(dir, true);
+			// add 100 documents
+			AddDocs(writer, 100);
+			Assert.AreEqual(100, writer.DocCount());
+			writer.Close();
+			
+			writer = NewWriter(aux, true);
+			writer.SetUseCompoundFile(false); // use one without a compound file
+			// add 40 documents in separate files
+			AddDocs(writer, 40);
+			Assert.AreEqual(40, writer.DocCount());
+			writer.Close();
+			
+			writer = NewWriter(aux2, true);
+			// add 50 documents in compound files
+			AddDocs2(writer, 50);
+			Assert.AreEqual(50, writer.DocCount());
+			writer.Close();
+			
+			// test doc count before segments are merged
+			writer = NewWriter(dir, false);
+			Assert.AreEqual(100, writer.DocCount());
+			writer.AddIndexesNoOptimize(new Directory[]{aux, aux2});
+			Assert.AreEqual(190, writer.DocCount());
+			writer.Close();
+			
+			// make sure the old index is correct
+			VerifyNumDocs(aux, 40);
+			
+			// make sure the new index is correct
+			VerifyNumDocs(dir, 190);
+			
+			// now add another set in.
+			Directory aux3 = new RAMDirectory();
+			writer = NewWriter(aux3, true);
+			// add 40 documents
+			AddDocs(writer, 40);
+			Assert.AreEqual(40, writer.DocCount());
+			writer.Close();
+			
+			// test doc count before segments are merged/index is optimized
+			writer = NewWriter(dir, false);
+			Assert.AreEqual(190, writer.DocCount());
+			writer.AddIndexesNoOptimize(new Directory[]{aux3});
+			Assert.AreEqual(230, writer.DocCount());
+			writer.Close();
+			
+			// make sure the new index is correct
+			VerifyNumDocs(dir, 230);
+			
+			VerifyTermDocs(dir, new Term("content", "aaa"), 180);
+			
+			VerifyTermDocs(dir, new Term("content", "bbb"), 50);
+			
+			// now optimize it.
+			writer = NewWriter(dir, false);
+			writer.Optimize();
+			writer.Close();
+			
+			// make sure the new index is correct
+			VerifyNumDocs(dir, 230);
+			
+			VerifyTermDocs(dir, new Term("content", "aaa"), 180);
+			
+			VerifyTermDocs(dir, new Term("content", "bbb"), 50);
+			
+			// now add a single document
+			Directory aux4 = new RAMDirectory();
+			writer = NewWriter(aux4, true);
+			AddDocs2(writer, 1);
+			writer.Close();
+			
+			writer = NewWriter(dir, false);
+			Assert.AreEqual(230, writer.DocCount());
+			writer.AddIndexesNoOptimize(new Directory[]{aux4});
+			Assert.AreEqual(231, writer.DocCount());
+			writer.Close();
+			
+			VerifyNumDocs(dir, 231);
+			
+			VerifyTermDocs(dir, new Term("content", "bbb"), 51);
+		}
+		
+		// case 0: add self or exceed maxMergeDocs, expect exception
+        [Test]
+		public virtual void  TestAddSelf()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			IndexWriter writer = null;
+			
+			writer = NewWriter(dir, true);
+			// add 100 documents
+			AddDocs(writer, 100);
+			Assert.AreEqual(100, writer.DocCount());
+			writer.Close();
+			
+			writer = NewWriter(aux, true);
+			writer.SetUseCompoundFile(false); // use one without a compound file
+			writer.SetMaxBufferedDocs(1000);
+			// add 40 documents in separate files, then recreate the index with 100 documents below
+			AddDocs(writer, 40);
+			writer.Close();
+			writer = NewWriter(aux, true);
+			writer.SetUseCompoundFile(false); // use one without a compound file
+			writer.SetMaxBufferedDocs(1000);
+			AddDocs(writer, 100);
+			writer.Close();
+			
+			writer = NewWriter(dir, false);
+			int maxMergeDocs = writer.GetMaxMergeDocs();
+			writer.SetMaxMergeDocs(99);
+			
+			try
+			{
+				// upper bound cannot exceed maxMergeDocs
+				writer.AddIndexesNoOptimize(new Directory[]{aux});
+				Assert.IsTrue(false);
+			}
+			catch (System.ArgumentException e)
+			{
+				Assert.AreEqual(100, writer.DocCount());
+			}
+			
+			writer.SetMaxMergeDocs(maxMergeDocs);
+			try
+			{
+				// cannot add self
+				writer.AddIndexesNoOptimize(new Directory[]{aux, dir});
+				Assert.IsTrue(false);
+			}
+			catch (System.ArgumentException e)
+			{
+				Assert.AreEqual(100, writer.DocCount());
+			}
+			writer.Close();
+			
+			// make sure the index is correct
+			VerifyNumDocs(dir, 100);
+		}
+		
+		// in all the remaining tests, make the doc count of the oldest segment
+		// in dir large so that it is never merged in addIndexesNoOptimize()
+		// case 1: no tail segments
+        [Test]
+		public virtual void  TestNoTailSegments()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			
+			IndexWriter writer = NewWriter(dir, false);
+			writer.SetMaxBufferedDocs(10);
+			writer.SetMergeFactor(4);
+			AddDocs(writer, 10);
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux});
+			Assert.AreEqual(1040, writer.DocCount());
+			Assert.AreEqual(2, writer.GetSegmentCount());
+			Assert.AreEqual(1000, writer.GetDocCount(0));
+			writer.Close();
+			
+			// make sure the index is correct
+			VerifyNumDocs(dir, 1040);
+		}
+		
+		// case 2: tail segments, invariants hold, no copy
+        [Test]
+		public virtual void  TestNoCopySegments()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			
+			IndexWriter writer = NewWriter(dir, false);
+			writer.SetMaxBufferedDocs(9);
+			writer.SetMergeFactor(4);
+			AddDocs(writer, 2);
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux});
+			Assert.AreEqual(1032, writer.DocCount());
+			Assert.AreEqual(2, writer.GetSegmentCount());
+			Assert.AreEqual(1000, writer.GetDocCount(0));
+			writer.Close();
+			
+			// make sure the index is correct
+			VerifyNumDocs(dir, 1032);
+		}
+		
+		// case 3: tail segments, invariants hold, copy, invariants hold
+        [Test]
+		public virtual void  TestNoMergeAfterCopy()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			
+			IndexWriter writer = NewWriter(dir, false);
+			writer.SetMaxBufferedDocs(10);
+			writer.SetMergeFactor(4);
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux, aux});
+			Assert.AreEqual(1060, writer.DocCount());
+			Assert.AreEqual(1000, writer.GetDocCount(0));
+			writer.Close();
+			
+			// make sure the index is correct
+			VerifyNumDocs(dir, 1060);
+		}
+		
+		// case 4: tail segments, invariants hold, copy, invariants do not hold
+        [Test]
+		public virtual void  TestMergeAfterCopy()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			
+			IndexReader reader = IndexReader.Open(aux);
+			for (int i = 0; i < 20; i++)
+			{
+				reader.DeleteDocument(i);
+			}
+			Assert.AreEqual(10, reader.NumDocs());
+			reader.Close();
+			
+			IndexWriter writer = NewWriter(dir, false);
+			writer.SetMaxBufferedDocs(4);
+			writer.SetMergeFactor(4);
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux, aux});
+			Assert.AreEqual(1020, writer.DocCount());
+			Assert.AreEqual(2, writer.GetSegmentCount());
+			Assert.AreEqual(1000, writer.GetDocCount(0));
+			writer.Close();
+			
+			// make sure the index is correct
+			VerifyNumDocs(dir, 1020);
+		}
+		
+		// case 5: tail segments, invariants do not hold
+        [Test]
+		public virtual void  TestMoreMerges()
+		{
+			// main directory
+			Directory dir = new RAMDirectory();
+			// auxiliary directory
+			Directory aux = new RAMDirectory();
+			Directory aux2 = new RAMDirectory();
+			
+			SetUpDirs(dir, aux);
+			
+			IndexWriter writer = NewWriter(aux2, true);
+			writer.SetMaxBufferedDocs(100);
+			writer.SetMergeFactor(10);
+			writer.AddIndexesNoOptimize(new Directory[]{aux});
+			Assert.AreEqual(30, writer.DocCount());
+			Assert.AreEqual(3, writer.GetSegmentCount());
+			writer.Close();
+			
+			IndexReader reader = IndexReader.Open(aux);
+			for (int i = 0; i < 27; i++)
+			{
+				reader.DeleteDocument(i);
+			}
+			Assert.AreEqual(3, reader.NumDocs());
+			reader.Close();
+			
+			reader = IndexReader.Open(aux2);
+			for (int i = 0; i < 8; i++)
+			{
+				reader.DeleteDocument(i);
+			}
+			Assert.AreEqual(22, reader.NumDocs());
+			reader.Close();
+			
+			writer = NewWriter(dir, false);
+			writer.SetMaxBufferedDocs(6);
+			writer.SetMergeFactor(4);
+			
+			writer.AddIndexesNoOptimize(new Directory[]{aux, aux2});
+			Assert.AreEqual(1025, writer.DocCount());
+			Assert.AreEqual(1000, writer.GetDocCount(0));
+			writer.Close();
+			
+			// make sure the index is correct
+			VerifyNumDocs(dir, 1025);
+		}
+		
+		private IndexWriter NewWriter(Directory dir, bool create)
+		{
+			return new IndexWriter(dir, new WhitespaceAnalyzer(), create);
+		}
+		
+		private void  AddDocs(IndexWriter writer, int numDocs)
+		{
+			for (int i = 0; i < numDocs; i++)
+			{
+				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
+				writer.AddDocument(doc);
+			}
+		}
+		
+		private void  AddDocs2(IndexWriter writer, int numDocs)
+		{
+			for (int i = 0; i < numDocs; i++)
+			{
+				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				doc.Add(new Field("content", "bbb", Field.Store.NO, Field.Index.TOKENIZED));
+				writer.AddDocument(doc);
+			}
+		}
+		
+		private void  VerifyNumDocs(Directory dir, int numDocs)
+		{
+			IndexReader reader = IndexReader.Open(dir);
+			Assert.AreEqual(numDocs, reader.MaxDoc());
+			Assert.AreEqual(numDocs, reader.NumDocs());
+			reader.Close();
+		}
+		
+		private void  VerifyTermDocs(Directory dir, Term term, int numDocs)
+		{
+			IndexReader reader = IndexReader.Open(dir);
+			TermDocs termDocs = reader.TermDocs(term);
+			int count = 0;
+			while (termDocs.Next())
+				count++;
+			Assert.AreEqual(numDocs, count);
+			reader.Close();
+		}
+		
+		private void  SetUpDirs(Directory dir, Directory aux)
+		{
+			IndexWriter writer = null;
+			
+			writer = NewWriter(dir, true);
+			writer.SetMaxBufferedDocs(1000);
+			// add 1000 documents
+			AddDocs(writer, 1000);
+			Assert.AreEqual(1000, writer.DocCount());
+			Assert.AreEqual(1, writer.GetSegmentCount());
+			writer.Close();
+			
+			writer = NewWriter(aux, true);
+			writer.SetUseCompoundFile(false); // use one without a compound file
+			writer.SetMaxBufferedDocs(100);
+			writer.SetMergeFactor(10);
+			// add 30 documents in 3 segments
+			for (int i = 0; i < 3; i++)
+			{
+				AddDocs(writer, 10);
+				writer.Close();
+				writer = NewWriter(aux, false);
+				writer.SetUseCompoundFile(false); // use one without a compound file
+				writer.SetMaxBufferedDocs(100);
+				writer.SetMergeFactor(10);
+			}
+			Assert.AreEqual(30, writer.DocCount());
+			Assert.AreEqual(3, writer.GetSegmentCount());
+			writer.Close();
+		}
+	}
+}
\ No newline at end of file
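The doc counts asserted above follow directly from the setup: SetUpDirs leaves 1000 documents in one segment in dir and 30 documents in three segments in aux, so, for example, TestNoTailSegments expects 1000 + 10 + 30 = 1040 after the copy. A minimal sketch of the call pattern under test, using only the API exercised in this file:

    IndexWriter writer = NewWriter(dir, false);            // append to the existing index
    writer.SetMaxBufferedDocs(10);                         // flush buffered docs into a segment at 10
    writer.SetMergeFactor(4);                              // merge once 4 segments of a level accumulate
    writer.AddIndexesNoOptimize(new Directory[] { aux });  // copy/merge aux's segments without a full optimize
    writer.Close();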

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestBackwardsCompatibility.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestBackwardsCompatibility.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,456 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using TermQuery = Lucene.Net.Search.TermQuery;
+using Hits = Lucene.Net.Search.Hits;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+
+namespace Lucene.Net.Index
+{
+	
+	/*
+	Verify we can read the pre-XXX file format, do searches
+	against it, and add documents to it.*/
+	
+    [TestFixture]
+	public class TestBackwardsCompatibility
+	{
+		
+		// Uncomment these cases & run in a pre-lockless checkout
+		// to create indices:
+		
+		/*
+		public void testCreatePreLocklessCFS() throws IOException {
+		CreateIndex("src/test/org/apache/lucene/index/index.prelockless.cfs", true);
+		}
+		
+		public void testCreatePreLocklessNoCFS() throws IOException {
+		CreateIndex("src/test/org/apache/lucene/index/index.prelockless.nocfs", false);
+		}
+		*/
+		
+		/* Unzips dirName + ".zip" --> dirName, removing dirName
+		first */
+		public virtual void  Unzip(System.String dirName)
+		{
+            Assert.Fail("needs integration with SharpZipLib");
+
+            /*
+			RmDir(dirName);
+			
+			System.Collections.IEnumerator entries;
+			ZipFile zipFile;
+			zipFile = new ZipFile(dirName + ".zip");
+			
+			entries = zipFile.Entries();
+			System.IO.FileInfo fileDir = new System.IO.FileInfo(dirName);
+			System.IO.Directory.CreateDirectory(fileDir.FullName);
+			
+			while (entries.MoveNext())
+			{
+				ZipEntry entry = (ZipEntry) entries.Current;
+				
+				System.IO.Stream in_Renamed = zipFile.GetInputStream(entry);
+				System.IO.Stream out_Renamed = new System.IO.BufferedStream(new System.IO.FileStream(new System.IO.FileInfo(System.IO.Path.Combine(fileDir.FullName, entry.getName())).FullName, System.IO.FileMode.Create));
+				
+				byte[] buffer = new byte[8192];
+				int len;
+				while ((len = SupportClass.ReadInput(in_Renamed, buffer, 0, buffer.Length)) >= 0)
+				{
+					out_Renamed.Write(SupportClass.ToByteArray(buffer), 0, len);
+				}
+				
+				in_Renamed.Close();
+				out_Renamed.Close();
+			}
+			
+			zipFile.Close();
+            */
+		}
+		
+        [Test]
+		public virtual void  TestCreateCFS()
+		{
+			System.String dirName = "testindex.cfs";
+			CreateIndex(dirName, true);
+			RmDir(dirName);
+		}
+		
+        [Test]
+		public virtual void  TestCreateNoCFS()
+		{
+			System.String dirName = "testindex.nocfs";
+			CreateIndex(dirName, true);
+			RmDir(dirName);
+		}
+		
+        [Test]
+		public virtual void  TestSearchOldIndex()
+		{
+			System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs"};
+			for (int i = 0; i < oldNames.Length; i++)
+			{
+				System.String dirName = "src/test/org/apache/lucene/index/index." + oldNames[i];
+				Unzip(dirName);
+				SearchIndex(dirName);
+				RmDir(dirName);
+			}
+		}
+		
+        [Test]
+		public virtual void  TestIndexOldIndexNoAdds()
+		{
+			System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs"};
+			for (int i = 0; i < oldNames.Length; i++)
+			{
+				System.String dirName = "src/test/org/apache/lucene/index/index." + oldNames[i];
+				Unzip(dirName);
+				ChangeIndexNoAdds(dirName);
+				RmDir(dirName);
+			}
+		}
+		
+        [Test]
+		public virtual void  TestIndexOldIndex()
+		{
+			System.String[] oldNames = new System.String[]{"prelockless.cfs", "prelockless.nocfs"};
+			for (int i = 0; i < oldNames.Length; i++)
+			{
+				System.String dirName = "src/test/org/apache/lucene/index/index." + oldNames[i];
+				Unzip(dirName);
+				ChangeIndexWithAdds(dirName);
+				RmDir(dirName);
+			}
+		}
+		
+		public virtual void  SearchIndex(System.String dirName)
+		{
+			//QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer());
+			//Query query = parser.parse("handle:1");
+			
+			Directory dir = FSDirectory.GetDirectory(dirName);
+			IndexSearcher searcher = new IndexSearcher(dir);
+			
+			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
+			Assert.AreEqual(34, hits.Length());
+			Lucene.Net.Documents.Document d = hits.Doc(0);
+			
+			// First document should be #21 since its norm was increased:
+			Assert.AreEqual("21", d.Get("id"), "didn't get the right document first");
+			
+			searcher.Close();
+			dir.Close();
+		}
+		
+		/* Open pre-lockless index, add docs, do a delete &
+		* setNorm, and search */
+		public virtual void  ChangeIndexWithAdds(System.String dirName)
+		{
+			
+			Directory dir = FSDirectory.GetDirectory(dirName);
+			// open writer
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			
+			// add 10 docs
+			for (int i = 0; i < 10; i++)
+			{
+				AddDoc(writer, 35 + i);
+			}
+			
+			// make sure writer sees right total -- writer seems not to know about deletes in .del?
+			Assert.AreEqual(45, writer.DocCount(), "wrong doc count");
+			writer.Close();
+			
+			// make sure searching sees right # hits
+			IndexSearcher searcher = new IndexSearcher(dir);
+			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
+			Assert.AreEqual(44, hits.Length(), "wrong number of hits");
+			Lucene.Net.Documents.Document d = hits.Doc(0);
+			Assert.AreEqual("21", d.Get("id"), "wrong first document");
+			searcher.Close();
+			
+			// make sure we can do another delete & another setNorm against this
+			// pre-lockless segment:
+			IndexReader reader = IndexReader.Open(dir);
+			Term searchTerm = new Term("id", "6");
+			int delCount = reader.DeleteDocuments(searchTerm);
+			Assert.AreEqual(1, delCount, "wrong delete count");
+			reader.SetNorm(22, "content", (float) 2.0);
+			reader.Close();
+			
+			// make sure 2nd delete & 2nd norm "took":
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
+			Assert.AreEqual(43, hits.Length(), "wrong number of hits");
+			d = hits.Doc(0);
+			Assert.AreEqual("22", d.Get("id"), "wrong first document");
+			searcher.Close();
+			
+			// optimize
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			writer.Optimize();
+			writer.Close();
+			
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
+			Assert.AreEqual(43, hits.Length(), "wrong number of hits");
+			d = hits.Doc(0);
+			Assert.AreEqual("22", d.Get("id"), "wrong first document");
+			searcher.Close();
+			
+			dir.Close();
+		}
+		
+		/* Open pre-lockless index, add docs, do a delete &
+		* setNorm, and search */
+		public virtual void  ChangeIndexNoAdds(System.String dirName)
+		{
+			
+			Directory dir = FSDirectory.GetDirectory(dirName);
+			
+			// make sure searching sees right # hits
+			IndexSearcher searcher = new IndexSearcher(dir);
+			Hits hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
+			Assert.AreEqual(34, hits.Length(), "wrong number of hits");
+			Lucene.Net.Documents.Document d = hits.Doc(0);
+			Assert.AreEqual("21", d.Get("id"), "wrong first document");
+			searcher.Close();
+			
+			// make sure we can do another delete & another setNorm against this
+			// pre-lockless segment:
+			IndexReader reader = IndexReader.Open(dir);
+			Term searchTerm = new Term("id", "6");
+			int delCount = reader.DeleteDocuments(searchTerm);
+			Assert.AreEqual(1, delCount, "wrong delete count");
+			reader.SetNorm(22, "content", (float) 2.0);
+			reader.Close();
+			
+			// make sure 2nd delete & 2nd norm "took":
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
+			Assert.AreEqual(33, hits.Length(), "wrong number of hits");
+			d = hits.Doc(0);
+			Assert.AreEqual("22", d.Get("id"), "wrong first document");
+			searcher.Close();
+			
+			// optimize
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			writer.Optimize();
+			writer.Close();
+			
+			searcher = new IndexSearcher(dir);
+			hits = searcher.Search(new TermQuery(new Term("content", "aaa")));
+			Assert.AreEqual(33, hits.Length(), "wrong number of hits");
+			d = hits.Doc(0);
+			Assert.AreEqual("22", d.Get("id"), "wrong first document");
+			searcher.Close();
+			
+			dir.Close();
+		}
+		
+		public virtual void  CreateIndex(System.String dirName, bool doCFS)
+		{
+			
+			Directory dir = FSDirectory.GetDirectory(dirName);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			writer.SetUseCompoundFile(doCFS);
+			
+			for (int i = 0; i < 35; i++)
+			{
+				AddDoc(writer, i);
+			}
+			Assert.AreEqual(35, writer.DocCount(), "wrong doc count");
+			writer.Close();
+			
+			// Delete one doc so we get a .del file:
+			IndexReader reader = IndexReader.Open(dir);
+			Term searchTerm = new Term("id", "7");
+			int delCount = reader.DeleteDocuments(searchTerm);
+			Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
+			
+			// Set one norm so we get a .s0 file:
+			reader.SetNorm(21, "content", (float) 1.5);
+			reader.Close();
+		}
+		
+		/* Verifies that the expected file names were produced */
+		
+		// disable until hardcoded file names are fixed:
+        [Test]
+		public virtual void  TestExactFileNames()
+		{
+			
+			System.String outputDir = "lucene.backwardscompat0.index";
+			Directory dir = FSDirectory.GetDirectory(outputDir);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			for (int i = 0; i < 35; i++)
+			{
+				AddDoc(writer, i);
+			}
+			Assert.AreEqual(35, writer.DocCount(), "wrong doc count");
+			writer.Close();
+			
+			// Delete one doc so we get a .del file:
+			IndexReader reader = IndexReader.Open(dir);
+			Term searchTerm = new Term("id", "7");
+			int delCount = reader.DeleteDocuments(searchTerm);
+			Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
+			
+			// Set one norm so we get a .s0 file:
+			reader.SetNorm(21, "content", (float) 1.5);
+			reader.Close();
+			
+			// The numbering of fields can vary depending on which
+			// JRE is in use.  On some JREs we see content bound to
+			// field 0; on others, field 1.  So, here we have to
+			// figure out which field number corresponds to
+			// "content", and then set our expected file names below
+			// accordingly:
+			CompoundFileReader cfsReader = new CompoundFileReader(dir, "_2.cfs");
+			FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
+			int contentFieldIndex = - 1;
+			for (int i = 0; i < fieldInfos.Size(); i++)
+			{
+				FieldInfo fi = fieldInfos.FieldInfo(i);
+				if (fi.Name.Equals("content"))
+				{
+					contentFieldIndex = i;
+					break;
+				}
+			}
+			cfsReader.Close();
+			Assert.IsTrue(contentFieldIndex != - 1, "could not locate the 'content' field number in the _2.cfs segment");
+			
+			// Now verify file names:
+			System.String[] expected = new System.String[]{"_0.cfs", "_0_1.del", "_1.cfs", "_2.cfs", "_2_1.s" + contentFieldIndex, "_3.cfs", "segments_a", "segments.gen"};
+			
+			System.String[] actual = dir.List();
+			System.Array.Sort(expected);
+			System.Array.Sort(actual);
+			if (!ArrayEquals(expected, actual))
+			{
+				Assert.Fail("incorrect filenames in index: expected:\n    " + AsString(expected) + "\n  actual:\n    " + AsString(actual));
+			}
+			dir.Close();
+			
+			RmDir(outputDir);
+		}
+		
+		private System.String AsString(System.String[] l)
+		{
+			System.String s = "";
+			for (int i = 0; i < l.Length; i++)
+			{
+				if (i > 0)
+				{
+					s += "\n    ";
+				}
+				s += l[i];
+			}
+			return s;
+		}
+		
+		private void  AddDoc(IndexWriter writer, int id)
+		{
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
+			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.UN_TOKENIZED));
+			writer.AddDocument(doc);
+		}
+		
+		private void  RmDir(System.String dir)
+		{
+			System.IO.FileInfo fileDir = new System.IO.FileInfo(dir);
+			bool tmpBool;
+			if (System.IO.File.Exists(fileDir.FullName))
+				tmpBool = true;
+			else
+				tmpBool = System.IO.Directory.Exists(fileDir.FullName);
+			if (tmpBool)
+			{
+                System.String[] files = System.IO.Directory.GetFileSystemEntries(fileDir.FullName);
+				if (files != null)
+				{
+					for (int i = 0; i < files.Length; i++)
+					{
+						bool tmpBool2;
+						if (System.IO.File.Exists(files[i]))
+						{
+							System.IO.File.Delete(files[i]);
+							tmpBool2 = true;
+						}
+						else if (System.IO.Directory.Exists(files[i]))
+						{
+							System.IO.Directory.Delete(files[i]);
+							tmpBool2 = true;
+						}
+						else
+							tmpBool2 = false;
+						bool generatedAux = tmpBool2;
+					}
+				}
+				bool tmpBool3;
+				if (System.IO.File.Exists(fileDir.FullName))
+				{
+					System.IO.File.Delete(fileDir.FullName);
+					tmpBool3 = true;
+				}
+				else if (System.IO.Directory.Exists(fileDir.FullName))
+				{
+					System.IO.Directory.Delete(fileDir.FullName);
+					tmpBool3 = true;
+				}
+				else
+					tmpBool3 = false;
+				bool generatedAux2 = tmpBool3;
+			}
+		}
+
+        public static bool ArrayEquals(System.Array array1, System.Array array2)
+        {
+            bool result = false;
+            if ((array1 == null) && (array2 == null))
+                result = true;
+            else if ((array1 != null) && (array2 != null))
+            {
+                if (array1.Length == array2.Length)
+                {
+                    int length = array1.Length;
+                    result = true;
+                    for (int index = 0; index < length; index++)
+                    {
+                        if (!(array1.GetValue(index).Equals(array2.GetValue(index))))
+                        {
+                            result = false;
+                            break;
+                        }
+                    }
+                }
+            }
+            return result;
+        }
+    }
+}
\ No newline at end of file
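Unzip above is currently a stub that fails with "needs integration with SharpZipLib", with the mechanically ported unzip loop left commented out. One possible implementation, assuming the external ICSharpCode.SharpZipLib package and its FastZip helper (not part of this commit), would be:

    using ICSharpCode.SharpZipLib.Zip;  // assumed external dependency

    public virtual void  Unzip(System.String dirName)
    {
        RmDir(dirName);                               // remove any stale copy first
        System.IO.Directory.CreateDirectory(dirName);
        // extract every entry of dirName + ".zip" into dirName
        new FastZip().ExtractZip(dirName + ".zip", dirName, null);
    }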

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestCompoundFile.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestCompoundFile.cs Sat Aug 11 09:56:37 2007
@@ -16,7 +16,9 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
 using IndexInput = Lucene.Net.Store.IndexInput;
@@ -58,9 +60,10 @@
 		[SetUp]
 		public virtual void  SetUp()
 		{
-			//dir = new RAMDirectory();
-            dir = FSDirectory.GetDirectory(new System.IO.FileInfo(System.Configuration.ConfigurationSettings.AppSettings.Get("tempDir") + "\\" + "testIndex"), true);
-		}
+            System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testIndex"));
+            Lucene.Net.Util._TestUtil.RmDir(file);
+            dir = FSDirectory.GetDirectory(file);
+        }
 		
 		
 		/// <summary>Creates a file of the specified size with random data. </summary>
@@ -665,6 +668,5 @@
                 os.Close();
             }
         }
-
-	}
+ 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDoc.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDoc.cs Sat Aug 11 09:56:37 2007
@@ -16,14 +16,16 @@
  */
 
 using System;
+
 using NUnit.Framework;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
+
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using FileDocument = Lucene.Net.Demo.FileDocument;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using Directory = Lucene.Net.Store.Directory;
 using Document = Lucene.Net.Documents.Document;
 using Similarity = Lucene.Net.Search.Similarity;
-using Directory = Lucene.Net.Store.Directory;
-using FSDirectory = Lucene.Net.Store.FSDirectory;
+using FileDocument = Lucene.Net.Demo.FileDocument;
 
 namespace Lucene.Net.Index
 {
@@ -114,8 +116,8 @@
 				{
 					pw.Close();
 				}
-				//if (fw != null)
-				//	fw.Close();
+				if (fw != null)
+					fw.Close();
 			}
 		}
 		
@@ -137,20 +139,20 @@
 			Directory directory = FSDirectory.GetDirectory(indexDir, true);
 			directory.Close();
 			
-			IndexDoc("one", "test.txt");
-			PrintSegment(out_Renamed, "one");
+            SegmentInfo si1 = IndexDoc("one", "test.txt");
+            PrintSegment(out_Renamed, si1);
 			
-			IndexDoc("two", "test2.txt");
-			PrintSegment(out_Renamed, "two");
+            SegmentInfo si2 = IndexDoc("two", "test2.txt");
+            PrintSegment(out_Renamed, si2);
 			
-			Merge("one", "two", "merge", false);
-			PrintSegment(out_Renamed, "merge");
+            SegmentInfo siMerge = Merge(si1, si2, "merge", false);
+            PrintSegment(out_Renamed, siMerge);
 			
-			Merge("one", "two", "merge2", false);
-			PrintSegment(out_Renamed, "merge2");
+            SegmentInfo siMerge2 = Merge(si1, si2, "merge2", false);
+            PrintSegment(out_Renamed, siMerge2);
 			
-			Merge("merge", "merge2", "merge3", false);
-			PrintSegment(out_Renamed, "merge3");
+            SegmentInfo siMerge3 = Merge(siMerge, siMerge2, "merge3", false);
+            PrintSegment(out_Renamed, siMerge3);
 			
 			out_Renamed.Close();
 			sw.Close();
@@ -163,20 +165,20 @@
 			directory = FSDirectory.GetDirectory(indexDir, true);
 			directory.Close();
 			
-			IndexDoc("one", "test.txt");
-			PrintSegment(out_Renamed, "one");
+            si1 = IndexDoc("one", "test.txt");
+            PrintSegment(out_Renamed, si1);
 			
-			IndexDoc("two", "test2.txt");
-			PrintSegment(out_Renamed, "two");
+            si2 = IndexDoc("two", "test2.txt");
+            PrintSegment(out_Renamed, si2);
 			
-			Merge("one", "two", "merge", true);
-			PrintSegment(out_Renamed, "merge");
+            siMerge = Merge(si1, si2, "merge", true);
+            PrintSegment(out_Renamed, siMerge);
 			
-			Merge("one", "two", "merge2", true);
-			PrintSegment(out_Renamed, "merge2");
+            siMerge2 = Merge(si1, si2, "merge2", true);
+            PrintSegment(out_Renamed, siMerge2);
 			
-			Merge("merge", "merge2", "merge3", true);
-			PrintSegment(out_Renamed, "merge3");
+            siMerge3 = Merge(siMerge, siMerge2, "merge3", true);
+            PrintSegment(out_Renamed, siMerge3);
 			
 			out_Renamed.Close();
 			sw.Close();
@@ -186,7 +188,7 @@
 		}
 		
 		
-		private void  IndexDoc(System.String segment, System.String fileName)
+		private SegmentInfo IndexDoc(System.String segment, System.String fileName)
 		{
 			Directory directory = FSDirectory.GetDirectory(indexDir, false);
 			Analyzer analyzer = new SimpleAnalyzer();
@@ -198,15 +200,16 @@
 			writer.AddDocument(segment, doc);
 			
 			directory.Close();
+            return new SegmentInfo(segment, 1, directory, false, false);
 		}
 		
 		
-		private void  Merge(System.String seg1, System.String seg2, System.String merged, bool useCompoundFile)
+		private SegmentInfo Merge(SegmentInfo si1, SegmentInfo si2, System.String merged, bool useCompoundFile)
 		{
 			Directory directory = FSDirectory.GetDirectory(indexDir, false);
 			
-			SegmentReader r1 = SegmentReader.Get(new SegmentInfo(seg1, 1, directory));
-			SegmentReader r2 = SegmentReader.Get(new SegmentInfo(seg2, 1, directory));
+			SegmentReader r1 = SegmentReader.Get(si1);
+			SegmentReader r2 = SegmentReader.Get(si2);
 			
 			SegmentMerger merger = new SegmentMerger(directory, merged);
 			
@@ -225,13 +228,14 @@
 			}
 			
 			directory.Close();
-		}
+            return new SegmentInfo(merged, si1.docCount + si2.docCount, directory, useCompoundFile, true);
+        }
 		
 		
-		private void  PrintSegment(System.IO.StreamWriter out_Renamed, System.String segment)
+		private void  PrintSegment(System.IO.StreamWriter out_Renamed, SegmentInfo si)
 		{
 			Directory directory = FSDirectory.GetDirectory(indexDir, false);
-			SegmentReader reader = SegmentReader.Get(new SegmentInfo(segment, 1, directory));
+			SegmentReader reader = SegmentReader.Get(si);
 			
 			for (int i = 0; i < reader.NumDocs(); i++)
 			{

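The TestDoc changes above stop passing bare segment names around and instead thread SegmentInfo objects from IndexDoc through Merge to PrintSegment. The five-argument constructor used here appears to be (name, docCount, directory, isCompoundFile, hasSingleNormFile), matching the Lucene 2.x sources this port tracks; a compact sketch of the new pattern:

    // assumed parameter meanings: name, docCount, directory, isCompoundFile, hasSingleNormFile
    Directory directory = FSDirectory.GetDirectory(indexDir, false);
    SegmentInfo si = new SegmentInfo("one", 1, directory, false, false);
    SegmentReader reader = SegmentReader.Get(si);   // readers are now built from the info, not a bare name
    reader.Close();
    directory.Close();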
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestDocumentWriter.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs Sat Aug 11 09:56:37 2007
@@ -16,13 +16,14 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using Analyzer = Lucene.Net.Analysis.Analyzer;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
 using WhitespaceTokenizer = Lucene.Net.Analysis.WhitespaceTokenizer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using Lucene.Net.Documents;
 using Similarity = Lucene.Net.Search.Similarity;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
@@ -63,9 +64,12 @@
 			}
 		}
 		private RAMDirectory dir;
-
-
-		[SetUp]
+		
+        // public TestDocumentWriter(System.String s)
+        // {
+        // }
+		
+        [SetUp]
         public virtual void  SetUp()
 		{
 			dir = new RAMDirectory();

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldInfos.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldInfos.cs Sat Aug 11 09:56:37 2007
@@ -16,10 +16,12 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using Document = Lucene.Net.Documents.Document;
-using IndexOutput = Lucene.Net.Store.IndexOutput;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using IndexOutput = Lucene.Net.Store.IndexOutput;
 
 namespace Lucene.Net.Index
 {
@@ -32,7 +34,11 @@
 		
 		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
 		
-		[SetUp]
+        // public TestFieldInfos(System.String s)
+        // {
+        // }
+		
+        [SetUp]
         public virtual void  SetUp()
 		{
 			DocHelper.SetupDoc(testDoc);

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFieldsReader.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFieldsReader.cs Sat Aug 11 09:56:37 2007
@@ -16,12 +16,15 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using Lucene.Net.Documents;
 using Similarity = Lucene.Net.Search.Similarity;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using _TestUtil = Lucene.Net.Util._TestUtil;
 
 namespace Lucene.Net.Index
 {
@@ -29,12 +32,44 @@
 	[TestFixture]
 	public class TestFieldsReader
 	{
-		private RAMDirectory dir = new RAMDirectory();
+        private class AnonymousClassFieldSelector : FieldSelector
+        {
+            public AnonymousClassFieldSelector(TestFieldsReader enclosingInstance)
+            {
+                InitBlock(enclosingInstance);
+            }
+            private void  InitBlock(TestFieldsReader enclosingInstance)
+            {
+                this.enclosingInstance = enclosingInstance;
+            }
+            private TestFieldsReader enclosingInstance;
+            public TestFieldsReader Enclosing_Instance
+            {
+                get
+                {
+                    return enclosingInstance;
+                }
+				
+            }
+            public virtual FieldSelectorResult Accept(System.String fieldName)
+            {
+                if (fieldName.Equals(DocHelper.TEXT_FIELD_1_KEY) || fieldName.Equals(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY) || fieldName.Equals(DocHelper.LAZY_FIELD_BINARY_KEY))
+                    return FieldSelectorResult.SIZE;
+                else if (fieldName.Equals(DocHelper.TEXT_FIELD_3_KEY))
+                    return FieldSelectorResult.LOAD;
+                else
+                    return FieldSelectorResult.NO_LOAD;
+            }
+        }
+        private RAMDirectory dir = new RAMDirectory();
 		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
 		private FieldInfos fieldInfos = null;
 		
+        // public TestFieldsReader(System.String s)
+        // {
+        // }
 		
-		[SetUp]
+        [SetUp]
         public virtual void  SetUp()
 		{
 			fieldInfos = new FieldInfos();
@@ -53,11 +88,11 @@
 			FieldsReader reader = new FieldsReader(dir, "test", fieldInfos);
 			Assert.IsTrue(reader != null);
 			Assert.IsTrue(reader.Size() == 1);
-			Lucene.Net.Documents.Document doc = reader.Doc(0);
+			Lucene.Net.Documents.Document doc = reader.Doc(0, null);
 			Assert.IsTrue(doc != null);
-			Assert.IsTrue(doc.GetField("textField1") != null);
+			Assert.IsTrue(doc.GetField(DocHelper.TEXT_FIELD_1_KEY) != null);
 			
-			Field field = doc.GetField("textField2");
+			Field field = doc.GetField(DocHelper.TEXT_FIELD_2_KEY);
 			Assert.IsTrue(field != null);
 			Assert.IsTrue(field.IsTermVectorStored() == true);
 			
@@ -65,7 +100,7 @@
 			Assert.IsTrue(field.IsStorePositionWithTermVector() == true);
 			Assert.IsTrue(field.GetOmitNorms() == false);
 			
-			field = doc.GetField("textField3");
+			field = doc.GetField(DocHelper.TEXT_FIELD_3_KEY);
 			Assert.IsTrue(field != null);
 			Assert.IsTrue(field.IsTermVectorStored() == false);
 			Assert.IsTrue(field.IsStoreOffsetWithTermVector() == false);
@@ -75,5 +110,184 @@
 			
 			reader.Close();
 		}
-	}
+		
+		[Test]
+        public virtual void  TestLazyFields()
+        {
+            Assert.IsTrue(dir != null);
+            Assert.IsTrue(fieldInfos != null);
+            FieldsReader reader = new FieldsReader(dir, "test", fieldInfos);
+            Assert.IsTrue(reader != null);
+            Assert.IsTrue(reader.Size() == 1);
+            System.Collections.Hashtable loadFieldNames = new System.Collections.Hashtable();
+            loadFieldNames.Add(DocHelper.TEXT_FIELD_1_KEY, DocHelper.TEXT_FIELD_1_KEY);
+            loadFieldNames.Add(DocHelper.TEXT_FIELD_UTF1_KEY, DocHelper.TEXT_FIELD_UTF1_KEY);
+            System.Collections.Hashtable lazyFieldNames = new System.Collections.Hashtable();
+            //new String[]{DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_BINARY_KEY};
+            lazyFieldNames.Add(DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LARGE_LAZY_FIELD_KEY);
+            lazyFieldNames.Add(DocHelper.LAZY_FIELD_KEY, DocHelper.LAZY_FIELD_KEY);
+            lazyFieldNames.Add(DocHelper.LAZY_FIELD_BINARY_KEY, DocHelper.LAZY_FIELD_BINARY_KEY);
+            lazyFieldNames.Add(DocHelper.TEXT_FIELD_UTF2_KEY, DocHelper.TEXT_FIELD_UTF2_KEY);
+            lazyFieldNames.Add(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY, DocHelper.COMPRESSED_TEXT_FIELD_2_KEY);
+            SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(loadFieldNames, lazyFieldNames);
+            Lucene.Net.Documents.Document doc = reader.Doc(0, fieldSelector);
+            Assert.IsTrue(doc != null, "doc is null and it shouldn't be");
+            Fieldable field = doc.GetFieldable(DocHelper.LAZY_FIELD_KEY);
+            Assert.IsTrue(field != null, "field is null and it shouldn't be");
+            Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be");
+            System.String value_Renamed = field.StringValue();
+            Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
+            Assert.IsTrue(value_Renamed.Equals(DocHelper.LAZY_FIELD_TEXT) == true, value_Renamed + " is not equal to " + DocHelper.LAZY_FIELD_TEXT);
+            field = doc.GetFieldable(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY);
+            Assert.IsTrue(field != null, "field is null and it shouldn't be");
+            Assert.IsTrue(field.IsLazy(), "field is not lazy and it should be");
+            value_Renamed = field.StringValue();
+            Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
+            Assert.IsTrue(value_Renamed.Equals(DocHelper.FIELD_2_COMPRESSED_TEXT) == true, value_Renamed + " is not equal to " + DocHelper.FIELD_2_COMPRESSED_TEXT);
+            field = doc.GetFieldable(DocHelper.TEXT_FIELD_1_KEY);
+            Assert.IsTrue(field != null, "field is null and it shouldn't be");
+            Assert.IsTrue(field.IsLazy() == false, "Field is lazy and it should not be");
+            field = doc.GetFieldable(DocHelper.TEXT_FIELD_UTF1_KEY);
+            Assert.IsTrue(field != null, "field is null and it shouldn't be");
+            Assert.IsTrue(field.IsLazy() == false, "Field is lazy and it should not be");
+            Assert.IsTrue(field.StringValue().Equals(DocHelper.FIELD_UTF1_TEXT) == true, field.StringValue() + " is not equal to " + DocHelper.FIELD_UTF1_TEXT);
+			
+            field = doc.GetFieldable(DocHelper.TEXT_FIELD_UTF2_KEY);
+            Assert.IsTrue(field != null, "field is null and it shouldn't be");
+            Assert.IsTrue(field.IsLazy() == true, "field is not lazy and it should be");
+            Assert.IsTrue(field.StringValue().Equals(DocHelper.FIELD_UTF2_TEXT) == true, field.StringValue() + " is not equal to " + DocHelper.FIELD_UTF2_TEXT);
+			
+            field = doc.GetFieldable(DocHelper.LAZY_FIELD_BINARY_KEY);
+            Assert.IsTrue(field != null, "field is null and it shouldn't be");
+            byte[] bytes = field.BinaryValue();
+            Assert.IsTrue(bytes != null, "bytes is null and it shouldn't be");
+            Assert.IsTrue(DocHelper.LAZY_FIELD_BINARY_BYTES.Length == bytes.Length, "");
+            for (int i = 0; i < bytes.Length; i++)
+            {
+                Assert.IsTrue(bytes[i] == DocHelper.LAZY_FIELD_BINARY_BYTES[i], "byte[" + i + "] is mismatched");
+            }
+        }
+		
+        [Test]
+        public virtual void  TestLoadFirst()
+        {
+            Assert.IsTrue(dir != null);
+            Assert.IsTrue(fieldInfos != null);
+            FieldsReader reader = new FieldsReader(dir, "test", fieldInfos);
+            Assert.IsTrue(reader != null);
+            Assert.IsTrue(reader.Size() == 1);
+            LoadFirstFieldSelector fieldSelector = new LoadFirstFieldSelector();
+            Lucene.Net.Documents.Document doc = reader.Doc(0, fieldSelector);
+            Assert.IsTrue(doc != null, "doc is null and it shouldn't be");
+            int count = 0;
+            System.Collections.IList l = doc.GetFields();
+            for (System.Collections.IEnumerator iter = l.GetEnumerator(); iter.MoveNext(); )
+            {
+                Field field = (Field) iter.Current;
+                Assert.IsTrue(field != null, "field is null and it shouldn't be");
+                System.String sv = field.StringValue();
+                Assert.IsTrue(sv != null, "sv is null and it shouldn't be");
+                count++;
+            }
+            Assert.IsTrue(count == 1, count + " does not equal: " + 1);
+        }
+		
+        /// <summary> Not really a test per se, but we should have some way of assessing whether this is worthwhile.
+        /// <p/>
+        /// Must test using a File based directory
+        /// 
+        /// </summary>
+        /// <throws>  Exception </throws>
+        [Test]
+        public virtual void  TestLazyPerformance()
+        {
+            System.String tmpIODir = SupportClass.AppSettings.Get("tempDir", "");
+            System.String userName = System.Environment.UserName;
+            System.String path = tmpIODir + System.IO.Path.DirectorySeparatorChar.ToString() + "lazyDir" + userName;
+            System.IO.FileInfo file = new System.IO.FileInfo(path);
+            _TestUtil.RmDir(file);
+            FSDirectory tmpDir = FSDirectory.GetDirectory(file);
+            Assert.IsTrue(tmpDir != null);
+            DocumentWriter writer = new DocumentWriter(tmpDir, new WhitespaceAnalyzer(), Similarity.GetDefault(), 50);
+            Assert.IsTrue(writer != null);
+            writer.AddDocument("test", testDoc);
+            Assert.IsTrue(fieldInfos != null);
+            FieldsReader reader;
+            long lazyTime = 0;
+            long regularTime = 0;
+            int length = 50;
+            System.Collections.Hashtable lazyFieldNames = new System.Collections.Hashtable();
+            lazyFieldNames.Add(DocHelper.LARGE_LAZY_FIELD_KEY, DocHelper.LARGE_LAZY_FIELD_KEY);
+            SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(new System.Collections.Hashtable(), lazyFieldNames);
+			
+            for (int i = 0; i < length; i++)
+            {
+                reader = new FieldsReader(tmpDir, "test", fieldInfos);
+                Assert.IsTrue(reader != null);
+                Assert.IsTrue(reader.Size() == 1);
+				
+                Lucene.Net.Documents.Document doc;
+                doc = reader.Doc(0, null); //Load all of them
+                Assert.IsTrue(doc != null, "doc is null and it shouldn't be");
+                Fieldable field = doc.GetFieldable(DocHelper.LARGE_LAZY_FIELD_KEY);
+                Assert.IsTrue(field.IsLazy() == false, "field is lazy");
+                System.String value_Renamed;
+                long start;
+                long finish;
+                start = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
+                //On my machine this was always 0ms.
+                value_Renamed = field.StringValue();
+                finish = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
+                Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
+                Assert.IsTrue(field != null, "field is null and it shouldn't be");
+                regularTime += (finish - start);
+                reader.Close();
+                reader = null;
+                doc = null;
+                //Hmmm, are we still in cache???
+                System.GC.Collect();
+                reader = new FieldsReader(tmpDir, "test", fieldInfos);
+                doc = reader.Doc(0, fieldSelector);
+                field = doc.GetFieldable(DocHelper.LARGE_LAZY_FIELD_KEY);
+                Assert.IsTrue(field.IsLazy() == true, "field is not lazy");
+                start = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
+                //On my machine this took around 50 - 70 ms.
+                value_Renamed = field.StringValue();
+                finish = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
+                Assert.IsTrue(value_Renamed != null, "value is null and it shouldn't be");
+                lazyTime += (finish - start);
+                reader.Close();
+            }
+            System.Console.Out.WriteLine("Average Non-lazy time (should be very close to zero): " + regularTime / length + " ms for " + length + " reads");
+            System.Console.Out.WriteLine("Average Lazy Time (should be greater than zero): " + lazyTime / length + " ms for " + length + " reads");
+        }
+		
+        [Test]
+        public virtual void  TestLoadSize()
+        {
+            FieldsReader reader = new FieldsReader(dir, "test", fieldInfos);
+            Lucene.Net.Documents.Document doc;
+			
+            doc = reader.Doc(0, new AnonymousClassFieldSelector(this));
+            Fieldable f1 = doc.GetFieldable(DocHelper.TEXT_FIELD_1_KEY);
+            Fieldable f3 = doc.GetFieldable(DocHelper.TEXT_FIELD_3_KEY);
+            Fieldable fb = doc.GetFieldable(DocHelper.LAZY_FIELD_BINARY_KEY);
+            Assert.IsTrue(f1.IsBinary());
+            Assert.IsTrue(!f3.IsBinary());
+            Assert.IsTrue(fb.IsBinary());
+            AssertSizeEquals(2 * DocHelper.FIELD_1_TEXT.Length, f1.BinaryValue());
+            Assert.AreEqual(DocHelper.FIELD_3_TEXT, f3.StringValue());
+            AssertSizeEquals(DocHelper.LAZY_FIELD_BINARY_BYTES.Length, fb.BinaryValue());
+			
+            reader.Close();
+        }
+		
+        private void  AssertSizeEquals(int size, byte[] sizebytes)
+        {
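+            //A SIZE field-selector result packs the field's byte size (2 bytes per
+            //char for text fields) into a 4-byte big-endian array; check each byte.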
+            Assert.AreEqual((byte) (size >> 24), sizebytes[0]);
+            Assert.AreEqual((byte) (size >> 16), sizebytes[1]);
+            Assert.AreEqual((byte) (size >> 8), sizebytes[2]);
+            Assert.AreEqual((byte) size, sizebytes[3]);
+        }
+    }
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestFilterIndexReader.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestFilterIndexReader.cs Sat Aug 11 09:56:37 2007
@@ -16,11 +16,13 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
 namespace Lucene.Net.Index
 {

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexFileDeleter.cs?view=auto&rev=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexFileDeleter.cs Sat Aug 11 09:56:37 2007
@@ -0,0 +1,233 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using IndexSearcher = Lucene.Net.Search.IndexSearcher;
+using TermQuery = Lucene.Net.Search.TermQuery;
+using Hits = Lucene.Net.Search.Hits;
+using Directory = Lucene.Net.Store.Directory;
+using IndexInput = Lucene.Net.Store.IndexInput;
+using IndexOutput = Lucene.Net.Store.IndexOutput;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+
+namespace Lucene.Net.Index
+{
+	
+	/*
+	Verify that IndexFileDeleter removes leftover, unreferenced
+	index files when a writer is opened on the directory.*/
+	
+    [TestFixture]
+    public class TestIndexFileDeleter
+	{
+        [Test]
+		public virtual void  TestDeleteLeftoverFiles()
+		{
+			
+			Directory dir = new RAMDirectory();
+			
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			int i;
+			for (i = 0; i < 35; i++)
+			{
+				AddDoc(writer, i);
+			}
+			writer.SetUseCompoundFile(false);
+			for (; i < 45; i++)
+			{
+				AddDoc(writer, i);
+			}
+			writer.Close();
+			
+			// Delete one doc so we get a .del file:
+			IndexReader reader = IndexReader.Open(dir);
+			Term searchTerm = new Term("id", "7");
+			int delCount = reader.DeleteDocuments(searchTerm);
+			Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
+			
+			// Set one norm so we get a .s0 file:
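+			// (modifying a norm on an open reader forces a separate-norms
+			// generation file, e.g. "_2_1.s0", to be written on close)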
+			reader.SetNorm(21, "content", (float) 1.5);
+			reader.Close();
+			
+			// Now, artificially create an extra .del file & extra
+			// .s0 file:
+			System.String[] files = dir.List();
+			
+			/*
+			for (int j = 0; j < files.Length; j++) {
+			    System.Console.Out.WriteLine(j + ": " + files[j]);
+			}
+			*/
+			
+			// The numbering of fields can vary depending on which
+			// JRE is in use.  On some JREs we see content bound to
+			// field 0; on others, field 1.  So, here we have to
+			// figure out which field number corresponds to
+			// "content", and then set our expected file names below
+			// accordingly:
+			CompoundFileReader cfsReader = new CompoundFileReader(dir, "_2.cfs");
+			FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
+			int contentFieldIndex = -1;
+			for (i = 0; i < fieldInfos.Size(); i++)
+			{
+				FieldInfo fi = fieldInfos.FieldInfo(i);
+				if (fi.Name.Equals("content"))
+				{
+					contentFieldIndex = i;
+					break;
+				}
+			}
+			cfsReader.Close();
+			Assert.IsTrue(contentFieldIndex != -1, "could not locate the 'content' field number in the _2.cfs segment");
+			
+			System.String normSuffix = "s" + contentFieldIndex;
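+			// Separate norms files are named "_<segment>_<gen>.s<fieldNumber>", while
+			// non-compound segments store plain norms as "_<segment>.f<fieldNumber>".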
+			
+			// Create a bogus separate norms file for a
+			// segment/field that actually has a separate norms file
+			// already:
+			CopyFile(dir, "_2_1." + normSuffix, "_2_2." + normSuffix);
+			
+			// Create a bogus separate norms file for a
+			// segment/field that actually has a separate norms file
+			// already, using the "not compound file" extension:
+			CopyFile(dir, "_2_1." + normSuffix, "_2_2.f" + contentFieldIndex);
+			
+			// Create a bogus separate norms file for a
+			// segment/field that does not have a separate norms
+			// file already:
+			CopyFile(dir, "_2_1." + normSuffix, "_1_1." + normSuffix);
+			
+			// Create a bogus separate norms file for a
+			// segment/field that does not have a separate norms
+			// file already using the "not compound file" extension:
+			CopyFile(dir, "_2_1." + normSuffix, "_1_1.f" + contentFieldIndex);
+			
+			// Create a bogus separate del file for a
+			// segment that already has a separate del file: 
+			CopyFile(dir, "_0_1.del", "_0_2.del");
+			
+			// Create a bogus separate del file for a
+			// segment that does not yet have a separate del file:
+			CopyFile(dir, "_0_1.del", "_1_1.del");
+			
+			// Create a bogus separate del file for a
+			// non-existent segment:
+			CopyFile(dir, "_0_1.del", "_188_1.del");
+			
+			// Create a bogus segment file:
+			CopyFile(dir, "_0.cfs", "_188.cfs");
+			
+			// Create a bogus fnm file when the CFS already exists:
+			CopyFile(dir, "_0.cfs", "_0.fnm");
+			
+			// Create a deletable file:
+			CopyFile(dir, "_0.cfs", "deletable");
+			
+			// Create some old segments file:
+			CopyFile(dir, "segments_a", "segments");
+			CopyFile(dir, "segments_a", "segments_2");
+			
+			// Create a bogus cfs file shadowing a non-cfs segment:
+			CopyFile(dir, "_2.cfs", "_3.cfs");
+			
+			System.String[] filesPre = dir.List();
+			
+			// Open & close a writer: it should delete all of the
+			// bogus files created above and nothing more:
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			writer.Close();
+			
+			System.String[] files2 = dir.List();
+			dir.Close();
+			
+			System.Array.Sort(files);
+			System.Array.Sort(files2);
+			
+			if (!ArrayEquals(files, files2))
+			{
+				Assert.Fail("IndexFileDeleter failed to delete unreferenced extra files: should have deleted " + (filesPre.Length - files.Length) + " files but only deleted " + (filesPre.Length - files2.Length) + "; expected files:\n    " + AsString(files) + "\n  actual files:\n    " + AsString(files2));
+			}
+		}
+		
+		private System.String AsString(System.String[] l)
+		{
+			System.String s = "";
+			for (int i = 0; i < l.Length; i++)
+			{
+				if (i > 0)
+				{
+					s += "\n    ";
+				}
+				s += l[i];
+			}
+			return s;
+		}
+		
+		public virtual void  CopyFile(Directory dir, System.String src, System.String dest)
+		{
+			IndexInput in_Renamed = dir.OpenInput(src);
+			IndexOutput out_Renamed = dir.CreateOutput(dest);
+			byte[] b = new byte[1024];
+			long remainder = in_Renamed.Length();
+			while (remainder > 0)
+			{
+				int len = (int) System.Math.Min(b.Length, remainder);
+				in_Renamed.ReadBytes(b, 0, len);
+				out_Renamed.WriteBytes(b, len);
+				remainder -= len;
+			}
+			// Close both streams so the copy is flushed and no handles leak.
+			in_Renamed.Close();
+			out_Renamed.Close();
+		}
+		
+		private void  AddDoc(IndexWriter writer, int id)
+		{
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
+			doc.Add(new Field("id", System.Convert.ToString(id), Field.Store.YES, Field.Index.UN_TOKENIZED));
+			writer.AddDocument(doc);
+		}
+
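+        // Element-wise array comparison, standing in for java.util.Arrays.equals.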
+        public static bool ArrayEquals(System.Array array1, System.Array array2)
+        {
+            bool result = false;
+            if ((array1 == null) && (array2 == null))
+                result = true;
+            else if ((array1 != null) && (array2 != null))
+            {
+                if (array1.Length == array2.Length)
+                {
+                    int length = array1.Length;
+                    result = true;
+                    for (int index = 0; index < length; index++)
+                    {
+                        if (!(array1.GetValue(index).Equals(array2.GetValue(index))))
+                        {
+                            result = false;
+                            break;
+                        }
+                    }
+                }
+            }
+            return result;
+        }
+    }
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexInput.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexInput.cs Sat Aug 11 09:56:37 2007
@@ -16,7 +16,9 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using IndexInput = Lucene.Net.Store.IndexInput;
 
 namespace Lucene.Net.Index
@@ -34,5 +36,41 @@
 			Assert.AreEqual(16385, is_Renamed.ReadVInt());
 			Assert.AreEqual("Lucene", is_Renamed.ReadString());
 		}
-	}
+		
+        /// <summary> Verifies that SkipChars advances correctly over both
+        /// single-byte and multi-byte (UTF-8) characters.
+        /// </summary>
+        /// <throws>  IOException </throws>
+        [Test]
+        public virtual void  TestSkipChars()
+        {
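+            //VInt stores 7 bits per byte, low-order first, with the high bit set on
+            //continuation bytes: 0x80 0x01 -> 128, 0xFF 0x7F -> 16383,
+            //0x80 0x80 0x01 -> 16384, 0x81 0x80 0x01 -> 16385; then 0x06 prefixes
+            //the six chars of "Lucene".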
+            byte[] bytes = new byte[]{(byte) 0x80, (byte) 0x01, (byte) 0xFF, (byte) 0x7F, (byte) 0x80, (byte) 0x80, (byte) 0x01, (byte) 0x81, (byte) 0x80, (byte) 0x01, (byte) 0x06, (byte) 'L', (byte) 'u', (byte) 'c', (byte) 'e', (byte) 'n', (byte) 'e'};
+            System.String utf8Str = "\u0634\u1ea1";
+            byte[] utf8Bytes = System.Text.Encoding.GetEncoding("UTF-8").GetBytes(utf8Str);
+            byte[] theBytes = new byte[bytes.Length + 1 + utf8Bytes.Length];
+            Array.Copy(bytes, 0, theBytes, 0, bytes.Length);
+            theBytes[bytes.Length] = (byte) utf8Str.Length; //Write the char count as a single byte, which is also a valid one-byte VInt since the count is < 128
+            Array.Copy(utf8Bytes, 0, theBytes, bytes.Length + 1, utf8Bytes.Length);
+            IndexInput is_Renamed = new MockIndexInput(theBytes);
+            Assert.AreEqual(128, is_Renamed.ReadVInt());
+            Assert.AreEqual(16383, is_Renamed.ReadVInt());
+            Assert.AreEqual(16384, is_Renamed.ReadVInt());
+            Assert.AreEqual(16385, is_Renamed.ReadVInt());
+            int charsToRead = is_Renamed.ReadVInt(); //number of chars in the Lucene string
+            Assert.IsTrue(0x06 == charsToRead, 0x06 + " does not equal: " + charsToRead);
+            is_Renamed.SkipChars(3);
+            char[] chars = new char[3]; //three chars remain after skipping the first three of "Lucene"
+            is_Renamed.ReadChars(chars, 0, 3);
+            System.String tmpStr = new System.String(chars);
+            Assert.IsTrue(tmpStr.Equals("ene") == true, tmpStr + " is not equal to " + "ene");
+            //Now read the UTF8 stuff
+            charsToRead = is_Renamed.ReadVInt() - 1; //since we are skipping one
+            is_Renamed.SkipChars(1);
+            Assert.IsTrue(utf8Str.Length - 1 == charsToRead, utf8Str.Length - 1 + " does not equal: " + charsToRead);
+            chars = new char[charsToRead];
+            is_Renamed.ReadChars(chars, 0, charsToRead);
+            tmpStr = new System.String(chars);
+            Assert.IsTrue(tmpStr.Equals(utf8Str.Substring(1)), tmpStr + " is not equal to " + utf8Str.Substring(1));
+        }
+    }
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexModifier.cs?view=diff&rev=564939&r1=564938&r2=564939
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexModifier.cs Sat Aug 11 09:56:37 2007
@@ -16,12 +16,16 @@
  */
 
 using System;
+
 using NUnit.Framework;
+
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using Index = Lucene.Net.Documents.Field.Index;
+using Store = Lucene.Net.Documents.Field.Store;
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;


