lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r411501 [25/30] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/DemoLib/HTML/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Analysis/Standard/ Lucene.Net/Docu...
Date Sun, 04 Jun 2006 02:41:25 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexReader.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexReader.cs Sat Jun  3 19:41:13 2006
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -22,6 +23,7 @@
 using Directory = Lucene.Net.Store.Directory;
 using FSDirectory = Lucene.Net.Store.FSDirectory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
 	[TestFixture]
@@ -31,18 +33,37 @@
 		[STAThread]
 		public static void  Main(System.String[] args)
 		{
-            /*
-			TestRunner.run(new TestSuite(typeof(TestIndexReader)));
+			// NUnit.Core.TestRunner(new NUnit.Core.TestSuite(typeof(TestIndexReader)));  // {{Aroush}} where is 'TestRunner'?
 			//        TestRunner.run (new TestIndexReader("testBasicDelete"));
 			//        TestRunner.run (new TestIndexReader("testDeleteReaderWriterConflict"));
 			//        TestRunner.run (new TestIndexReader("testDeleteReaderReaderConflict"));
 			//        TestRunner.run (new TestIndexReader("testFilesOpenClose"));
-            */
+		}
+		
+		public virtual void  TestIsCurrent()
+		{
+			RAMDirectory d = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
+			AddDocumentWithFields(writer);
+			writer.Close();
+			// set up reader:
+			IndexReader reader = IndexReader.Open(d);
+			Assert.IsTrue(reader.IsCurrent());
+			// modify index by adding another document:
+			writer = new IndexWriter(d, new StandardAnalyzer(), false);
+			AddDocumentWithFields(writer);
+			writer.Close();
+			Assert.IsFalse(reader.IsCurrent());
+			// re-create index:
+			writer = new IndexWriter(d, new StandardAnalyzer(), true);
+			AddDocumentWithFields(writer);
+			writer.Close();
+			Assert.IsFalse(reader.IsCurrent());
+			reader.Close();
 		}
 		
 		/// <summary> Tests the IndexReader.getFieldNames implementation</summary>
 		/// <throws>  Exception on error </throws>
-		[Test]
 		public virtual void  TestGetFieldNames()
 		{
 			RAMDirectory d = new RAMDirectory();
@@ -52,54 +73,94 @@
 			writer.Close();
 			// set up reader
 			IndexReader reader = IndexReader.Open(d);
-            System.Collections.Hashtable fieldNames = (System.Collections.Hashtable) reader.GetFieldNames();
-			Assert.IsTrue(fieldNames.Contains("keyword"));
-			Assert.IsTrue(fieldNames.Contains("text"));
-			Assert.IsTrue(fieldNames.Contains("unindexed"));
-			Assert.IsTrue(fieldNames.Contains("unstored"));
+			System.Collections.ICollection fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
+			Assert.IsTrue(CollectionContains(fieldNames, "keyword"));
+			Assert.IsTrue(CollectionContains(fieldNames, "text"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unindexed"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unstored"));
 			// add more documents
 			writer = new IndexWriter(d, new StandardAnalyzer(), false);
 			// want to get some more segments here
-			for (int i = 0; i < 5 * writer.mergeFactor; i++)
+			for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
 			{
 				AddDocumentWithFields(writer);
 			}
 			// new fields are in some different segments (we hope)
-			for (int i = 0; i < 5 * writer.mergeFactor; i++)
+			for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
 			{
 				AddDocumentWithDifferentFields(writer);
 			}
+			// new termvector fields
+			for (int i = 0; i < 5 * writer.GetMergeFactor(); i++)
+			{
+				AddDocumentWithTermVectorFields(writer);
+			}
+			
 			writer.Close();
 			// verify fields again
 			reader = IndexReader.Open(d);
-			fieldNames = (System.Collections.Hashtable) reader.GetFieldNames();
-            Assert.AreEqual(9, fieldNames.Count); // the following fields + an empty one (bug?!)
-			Assert.IsTrue(fieldNames.Contains("keyword"));
-			Assert.IsTrue(fieldNames.Contains("text"));
-			Assert.IsTrue(fieldNames.Contains("unindexed"));
-			Assert.IsTrue(fieldNames.Contains("unstored"));
-			Assert.IsTrue(fieldNames.Contains("keyword2"));
-			Assert.IsTrue(fieldNames.Contains("text2"));
-			Assert.IsTrue(fieldNames.Contains("unindexed2"));
-			Assert.IsTrue(fieldNames.Contains("unstored2"));
+			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
+			Assert.AreEqual(13, fieldNames.Count); // the following fields
+			Assert.IsTrue(CollectionContains(fieldNames, "keyword"));
+			Assert.IsTrue(CollectionContains(fieldNames, "text"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unindexed"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unstored"));
+			Assert.IsTrue(CollectionContains(fieldNames, "keyword2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "text2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unindexed2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unstored2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvnot"));
+			Assert.IsTrue(CollectionContains(fieldNames, "termvector"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvposition"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvoffset"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvpositionoffset"));
 			
 			// verify that only indexed fields were returned
-			System.Collections.ICollection indexedFieldNames = reader.GetFieldNames(true);
-            Assert.AreEqual(6, indexedFieldNames.Count);
-			Assert.IsTrue(fieldNames.Contains("keyword"));
-			Assert.IsTrue(fieldNames.Contains("text"));
-			Assert.IsTrue(fieldNames.Contains("unstored"));
-			Assert.IsTrue(fieldNames.Contains("keyword2"));
-			Assert.IsTrue(fieldNames.Contains("text2"));
-			Assert.IsTrue(fieldNames.Contains("unstored2"));
+			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.INDEXED);
+			Assert.AreEqual(11, fieldNames.Count); // 6 original + the 5 termvector fields 
+			Assert.IsTrue(CollectionContains(fieldNames, "keyword"));
+			Assert.IsTrue(CollectionContains(fieldNames, "text"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unstored"));
+			Assert.IsTrue(CollectionContains(fieldNames, "keyword2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "text2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unstored2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvnot"));
+			Assert.IsTrue(CollectionContains(fieldNames, "termvector"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvposition"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvoffset"));
+			Assert.IsTrue(CollectionContains(fieldNames, "tvpositionoffset"));
 			
 			// verify that only unindexed fields were returned
-			System.Collections.ICollection unindexedFieldNames = reader.GetFieldNames(false);
-            Assert.AreEqual(3, unindexedFieldNames.Count); // the following fields + an empty one
-			Assert.IsTrue(fieldNames.Contains("unindexed"));
-            Assert.IsTrue(fieldNames.Contains("unindexed2"));
-		}
-		
+			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.UNINDEXED);
+			Assert.AreEqual(2, fieldNames.Count); // the following fields
+			Assert.IsTrue(CollectionContains(fieldNames, "unindexed"));
+			Assert.IsTrue(CollectionContains(fieldNames, "unindexed2"));
+			
+			// verify index term vector fields  
+			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR);
+			Assert.AreEqual(1, fieldNames.Count); // 1 field has term vector only
+			Assert.IsTrue(CollectionContains(fieldNames, "termvector"));
+			
+			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION);
+			Assert.AreEqual(1, fieldNames.Count); // 1 field has term vector with position only
+			Assert.IsTrue(CollectionContains(fieldNames, "tvposition"));
+			
+			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET);
+			Assert.AreEqual(1, fieldNames.Count); // 1 field has term vector with offset only
+			Assert.IsTrue(CollectionContains(fieldNames, "tvoffset"));
+			
+			fieldNames = reader.GetFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET);
+			Assert.AreEqual(1, fieldNames.Count); // 1 field has term vector with position and offset
+			Assert.IsTrue(CollectionContains(fieldNames, "tvpositionoffset"));
+		}
+
+        public static bool CollectionContains(System.Collections.ICollection col, System.String val)
+        {
+            foreach (object item in col)
+                if (item.ToString() == val)
+                    return true;
+            return false;
+        }
 		
 		private void  AssertTermDocsCount(System.String msg, IndexReader reader, Term term, int expected)
 		{
@@ -108,7 +169,7 @@
 			try
 			{
 				tdocs = reader.TermDocs(term);
-                Assert.IsNotNull(tdocs, msg + ", null TermDocs");
+				Assert.IsNotNull(tdocs, msg + ", null TermDocs");
 				int count = 0;
 				while (tdocs.Next())
 				{
@@ -119,13 +180,7 @@
 			finally
 			{
 				if (tdocs != null)
-					try
-					{
-						tdocs.Close();
-					}
-					catch (System.Exception e)
-					{
-					}
+					tdocs.Close();
 			}
 		}
 		
@@ -175,8 +230,8 @@
 			DeleteReaderWriterConflict(false);
 		}
 		
-        [Test]
-		public virtual void  TestDeleteReaderWriterConflictOptimized()
+		[Test]
+        public virtual void  TestDeleteReaderWriterConflictOptimized()
 		{
 			DeleteReaderWriterConflict(true);
 		}
@@ -269,11 +324,11 @@
 		
 		private Directory GetDirectory(bool create)
 		{
-            return FSDirectory.GetDirectory(new System.IO.FileInfo(SupportClass.AppSettings.Get("tempDir", "") + "\\" + "testIndex"), create);
+			return FSDirectory.GetDirectory(System.IO.Path.Combine(System.Configuration.ConfigurationSettings.AppSettings.Get("tempDir"), "testIndex"), create);
 		}
 		
-        [Test]
-		public virtual void  TestFilesOpenClose()
+		[Test]
+        public virtual void  TestFilesOpenClose()
 		{
 			// Create initial data set
 			Directory dir = GetDirectory(true);
@@ -301,14 +356,80 @@
 			dir = GetDirectory(true);
 		}
 		
-        [Test]
-		public virtual void  TestDeleteReaderReaderConflictUnoptimized()
+		[Test]
+        public virtual void  TestLastModified()
+		{
+			Assert.IsFalse(IndexReader.IndexExists("there_is_no_such_index"));
+			Directory dir = new RAMDirectory();
+			Assert.IsFalse(IndexReader.IndexExists(dir));
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			AddDocumentWithFields(writer);
+			Assert.IsTrue(IndexReader.IsLocked(dir)); // writer open, so dir is locked
+			writer.Close();
+			Assert.IsTrue(IndexReader.IndexExists(dir));
+			IndexReader reader = IndexReader.Open(dir);
+			Assert.IsFalse(IndexReader.IsLocked(dir)); // reader only, no lock
+			long version = IndexReader.LastModified(dir);
+			reader.Close();
+			// modify index and check version has been incremented:
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			AddDocumentWithFields(writer);
+			writer.Close();
+			reader = IndexReader.Open(dir);
+			Assert.IsTrue(version < IndexReader.GetCurrentVersion(dir));
+			reader.Close();
+		}
+		
+		[Test]
+        public virtual void  TestLock()
+		{
+			Directory dir = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			AddDocumentWithFields(writer);
+			writer.Close();
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			IndexReader reader = IndexReader.Open(dir);
+			try
+			{
+				reader.Delete(0);
+				Assert.Fail("expected lock");
+			}
+			catch (System.IO.IOException e)
+			{
+				// expected exception
+			}
+			IndexReader.Unlock(dir); // this should not be done in the real world! 
+			reader.Delete(0);
+			reader.Close();
+			writer.Close();
+		}
+		
+		[Test]
+        public virtual void  TestUndeleteAll()
+		{
+			Directory dir = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			AddDocumentWithFields(writer);
+			AddDocumentWithFields(writer);
+			writer.Close();
+			IndexReader reader = IndexReader.Open(dir);
+			reader.Delete(0);
+			reader.Delete(1);
+			reader.UndeleteAll();
+			reader.Close();
+			reader = IndexReader.Open(dir);
+			Assert.AreEqual(2, reader.NumDocs()); // nothing has really been deleted thanks to undeleteAll()
+			reader.Close();
+		}
+		
+		[Test]
+        public virtual void  TestDeleteReaderReaderConflictUnoptimized()
 		{
 			DeleteReaderReaderConflict(false);
 		}
 		
-        [Test]
-		public virtual void  TestDeleteReaderReaderConflictOptimized()
+		[Test]
+        public virtual void  TestDeleteReaderReaderConflictOptimized()
 		{
 			DeleteReaderReaderConflict(true);
 		}
@@ -422,37 +543,41 @@
 		
 		private void  AddDocumentWithFields(IndexWriter writer)
 		{
-			Document doc = new Document();
-			doc.Add(Field.Keyword("keyword", "test1"));
-			doc.Add(Field.Text("text", "test1"));
-			doc.Add(Field.UnIndexed("unindexed", "test1"));
-			doc.Add(Field.UnStored("unstored", "test1"));
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("keyword", "test1", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("text", "test1", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("unindexed", "test1", Field.Store.YES, Field.Index.NO));
+			doc.Add(new Field("unstored", "test1", Field.Store.NO, Field.Index.TOKENIZED));
 			writer.AddDocument(doc);
 		}
 		
 		private void  AddDocumentWithDifferentFields(IndexWriter writer)
 		{
-			Document doc = new Document();
-			doc.Add(Field.Keyword("keyword2", "test1"));
-			doc.Add(Field.Text("text2", "test1"));
-			doc.Add(Field.UnIndexed("unindexed2", "test1"));
-			doc.Add(Field.UnStored("unstored2", "test1"));
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("keyword2", "test1", Field.Store.YES, Field.Index.UN_TOKENIZED));
+			doc.Add(new Field("text2", "test1", Field.Store.YES, Field.Index.TOKENIZED));
+			doc.Add(new Field("unindexed2", "test1", Field.Store.YES, Field.Index.NO));
+			doc.Add(new Field("unstored2", "test1", Field.Store.NO, Field.Index.TOKENIZED));
 			writer.AddDocument(doc);
 		}
 		
-		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
+		private void  AddDocumentWithTermVectorFields(IndexWriter writer)
 		{
-			Document doc = new Document();
-			doc.Add(Field.UnStored("content", value_Renamed));
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("tvnot", "tvnot", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.NO));
+			doc.Add(new Field("termvector", "termvector", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.YES));
+			doc.Add(new Field("tvoffset", "tvoffset", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_OFFSETS));
+			doc.Add(new Field("tvposition", "tvposition", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS));
+			doc.Add(new Field("tvpositionoffset", "tvpositionoffset", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
 			
-			try
-			{
-				writer.AddDocument(doc);
-			}
-			catch (System.IO.IOException e)
-			{
-                System.Console.Error.WriteLine(e.StackTrace);
-			}
+			writer.AddDocument(doc);
+		}
+		
+		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
+		{
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
 		}
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriter.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestIndexWriter.cs Sat Jun  3 19:41:13 2006
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -20,19 +21,20 @@
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
 	
 	
 	/// <author>  goller
 	/// </author>
-	/// <version>  $Id: TestIndexWriter.java,v 1.3 2003/10/13 14:31:38 otis Exp $
+	/// <version>  $Id: TestIndexWriter.java 208807 2005-07-01 22:13:53Z dnaber $
 	/// </version>
-    [TestFixture]
+	[TestFixture]
     public class TestIndexWriter
 	{
-        [Test]
-		public virtual void  TestDocCount()
+		[Test]
+        public virtual void  TestDocCount()
 		{
 			Directory dir = new RAMDirectory();
 			
@@ -40,67 +42,52 @@
 			IndexReader reader = null;
 			int i;
 			
-			try
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			
+			// add 100 documents
+			for (i = 0; i < 100; i++)
 			{
-				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-				
-				// add 100 documents
-				for (i = 0; i < 100; i++)
-				{
-					AddDoc(writer);
-				}
-				Assert.AreEqual(100, writer.DocCount());
-				writer.Close();
-				
-				// delete 40 documents
-				reader = IndexReader.Open(dir);
-				for (i = 0; i < 40; i++)
-				{
-					reader.Delete(i);
-				}
-				reader.Close();
-				
-				// test doc count before segments are merged/index is optimized
-				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
-				Assert.AreEqual(100, writer.DocCount());
-				writer.Close();
-				
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(100, reader.MaxDoc());
-				Assert.AreEqual(60, reader.NumDocs());
-				reader.Close();
-				
-				// optimize the index and check that the new doc count is correct
-				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
-				writer.Optimize();
-				Assert.AreEqual(60, writer.DocCount());
-				writer.Close();
-				
-				// check that the index reader gives the same numbers.
-				reader = IndexReader.Open(dir);
-				Assert.AreEqual(60, reader.MaxDoc());
-				Assert.AreEqual(60, reader.NumDocs());
-				reader.Close();
+				AddDoc(writer);
 			}
-			catch (System.IO.IOException e)
+			Assert.AreEqual(100, writer.DocCount());
+			writer.Close();
+			
+			// delete 40 documents
+			reader = IndexReader.Open(dir);
+			for (i = 0; i < 40; i++)
 			{
-                System.Console.Error.WriteLine(e.StackTrace);
+				reader.Delete(i);
 			}
+			reader.Close();
+			
+			// test doc count before segments are merged/index is optimized
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			Assert.AreEqual(100, writer.DocCount());
+			writer.Close();
+			
+			reader = IndexReader.Open(dir);
+			Assert.AreEqual(100, reader.MaxDoc());
+			Assert.AreEqual(60, reader.NumDocs());
+			reader.Close();
+			
+			// optimize the index and check that the new doc count is correct
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			writer.Optimize();
+			Assert.AreEqual(60, writer.DocCount());
+			writer.Close();
+			
+			// check that the index reader gives the same numbers.
+			reader = IndexReader.Open(dir);
+			Assert.AreEqual(60, reader.MaxDoc());
+			Assert.AreEqual(60, reader.NumDocs());
+			reader.Close();
 		}
 		
 		private void  AddDoc(IndexWriter writer)
 		{
-			Document doc = new Document();
-			doc.Add(Field.UnStored("content", "aaa"));
-			
-			try
-			{
-				writer.AddDocument(doc);
-			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine(e.StackTrace);
-			}
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
 		}
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestMultiReader.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestMultiReader.cs Sat Jun  3 19:41:13 2006
@@ -13,57 +13,44 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using Document = Lucene.Net.Documents.Document;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
 	public class TestMultiReader
 	{
 		private Directory dir = new RAMDirectory();
-		private Document doc1 = new Document();
-		private Document doc2 = new Document();
+		private Lucene.Net.Documents.Document doc1 = new Lucene.Net.Documents.Document();
+		private Lucene.Net.Documents.Document doc2 = new Lucene.Net.Documents.Document();
 		private SegmentReader reader1;
 		private SegmentReader reader2;
 		private SegmentReader[] readers = new SegmentReader[2];
 		private SegmentInfos sis = new SegmentInfos();
 		
-        [TestFixtureSetUp]
-		protected virtual void  SetUp()
+		
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
 		{
 			DocHelper.SetupDoc(doc1);
 			DocHelper.SetupDoc(doc2);
 			DocHelper.WriteDoc(dir, "seg-1", doc1);
 			DocHelper.WriteDoc(dir, "seg-2", doc2);
-			
-			try
-			{
-				sis.Write(dir);
-				reader1 = new SegmentReader(new SegmentInfo("seg-1", 1, dir));
-				reader2 = new SegmentReader(new SegmentInfo("seg-2", 1, dir));
-				readers[0] = reader1;
-				readers[1] = reader2;
-			}
-			catch (System.IO.IOException e)
-			{
-                System.Console.Error.WriteLine(e.StackTrace);
-			}
-		}
-		/*IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-		writer.addDocument(doc1);
-		writer.addDocument(doc2);
-		writer.close();*/
-        [TestFixtureTearDown]
-		protected virtual void  TearDown()
-		{
-			
+			sis.Write(dir);
+			reader1 = SegmentReader.Get(new SegmentInfo("seg-1", 1, dir));
+			reader2 = SegmentReader.Get(new SegmentInfo("seg-2", 1, dir));
+			readers[0] = reader1;
+			readers[1] = reader2;
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
 			Assert.IsTrue(dir != null);
 			Assert.IsTrue(reader1 != null);
@@ -71,43 +58,41 @@
 			Assert.IsTrue(sis != null);
 		}
 		
-        [Test]
-		public virtual void  TestDocument()
+		[Test]
+        public virtual void  TestDocument()
+		{
+			sis.Read(dir);
+			MultiReader reader = new MultiReader(dir, sis, false, readers);
+			Assert.IsTrue(reader != null);
+			Lucene.Net.Documents.Document newDoc1 = reader.Document(0);
+			Assert.IsTrue(newDoc1 != null);
+			Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.unstored.Count);
+			Lucene.Net.Documents.Document newDoc2 = reader.Document(1);
+			Assert.IsTrue(newDoc2 != null);
+			Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.unstored.Count);
+			TermFreqVector vector = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
+			Assert.IsTrue(vector != null);
+			TestSegmentReader.CheckNorms(reader);
+		}
+		
+		[Test]
+        public virtual void  TestUndeleteAll()
 		{
-			try
-			{
-				sis.Read(dir);
-				MultiReader reader = new MultiReader(dir, sis, false, readers);
-				Assert.IsTrue(reader != null);
-				Document newDoc1 = reader.Document(0);
-				Assert.IsTrue(newDoc1 != null);
-				Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - 2);
-				Document newDoc2 = reader.Document(1);
-				Assert.IsTrue(newDoc2 != null);
-				Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - 2);
-				TermFreqVector vector = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
-				Assert.IsTrue(vector != null);
-			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
-			}
+			sis.Read(dir);
+			MultiReader reader = new MultiReader(dir, sis, false, readers);
+			Assert.IsTrue(reader != null);
+			Assert.AreEqual(2, reader.NumDocs());
+			reader.Delete(0);
+			Assert.AreEqual(1, reader.NumDocs());
+			reader.UndeleteAll();
+			Assert.AreEqual(2, reader.NumDocs());
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestTermVectors()
 		{
-			try
-			{
-				MultiReader reader = new MultiReader(dir, sis, false, readers);
-				Assert.IsTrue(reader != null);
-			}
-			catch (System.IO.IOException e)
-			{
-                System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
-			}
+			MultiReader reader = new MultiReader(dir, sis, false, readers);
+			Assert.IsTrue(reader != null);
 		}
 	}
 }

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestParallelReader.cs?rev=411501&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelReader.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestParallelReader.cs Sat Jun  3 19:41:13 2006
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2004 The Apache Software Foundation
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using NUnit.Framework;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Lucene.Net.Search;
+using Searchable = Lucene.Net.Search.Searchable;
+using Occur = Lucene.Net.Search.BooleanClause.Occur;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Index
+{
+	[TestFixture]
+	public class TestParallelReader
+	{
+		
+		private Searcher parallel;
+		private Searcher single;
+		
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
+		{
+			single = Single();
+			parallel = Parallel();
+		}
+		
+		[Test]
+        public virtual void  TestQueries()
+		{
+			QueryTest(new TermQuery(new Term("f1", "v1")));
+			QueryTest(new TermQuery(new Term("f1", "v2")));
+			QueryTest(new TermQuery(new Term("f2", "v1")));
+			QueryTest(new TermQuery(new Term("f2", "v2")));
+			QueryTest(new TermQuery(new Term("f3", "v1")));
+			QueryTest(new TermQuery(new Term("f3", "v2")));
+			QueryTest(new TermQuery(new Term("f4", "v1")));
+			QueryTest(new TermQuery(new Term("f4", "v2")));
+			
+			BooleanQuery bq1 = new BooleanQuery();
+			bq1.Add(new TermQuery(new Term("f1", "v1")), Occur.MUST);
+			bq1.Add(new TermQuery(new Term("f4", "v1")), Occur.MUST);
+			QueryTest(bq1);
+		}
+		
+		[Test]
+        public virtual void  TestFieldNames()
+		{
+			Directory dir1 = GetDir1();
+			Directory dir2 = GetDir2();
+			ParallelReader pr = new ParallelReader();
+			pr.Add(IndexReader.Open(dir1));
+			pr.Add(IndexReader.Open(dir2));
+			System.Collections.ICollection fieldNames = pr.GetFieldNames(IndexReader.FieldOption.ALL);
+			Assert.AreEqual(4, fieldNames.Count);
+			Assert.IsTrue(CollectionContains(fieldNames, "f1"));
+			Assert.IsTrue(CollectionContains(fieldNames, "f2"));
+			Assert.IsTrue(CollectionContains(fieldNames, "f3"));
+			Assert.IsTrue(CollectionContains(fieldNames, "f4"));
+		}
+
+        public static bool CollectionContains(System.Collections.ICollection col, System.String val)
+        {
+            foreach (object item in col)
+                if (item.ToString() == val)
+                    return true;
+            return false;
+        }
+		
+		[Test]
+        public virtual void  TestIncompatibleIndexes()
+		{
+			// two documents:
+			Directory dir1 = GetDir1();
+			
+			// one document only:
+			Directory dir2 = new RAMDirectory();
+			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true);
+			Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+			d3.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			w2.AddDocument(d3);
+			w2.Close();
+			
+			ParallelReader pr = new ParallelReader();
+			pr.Add(IndexReader.Open(dir1));
+			try
+			{
+				pr.Add(IndexReader.Open(dir2));
+				Assert.Fail("didn't get expected exception: indexes don't have same number of documents");
+			}
+			catch (System.ArgumentException e)
+			{
+				// expected exception
+			}
+		}
+		
+		private void  QueryTest(Query query)
+		{
+			Hits parallelHits = parallel.Search(query);
+			Hits singleHits = single.Search(query);
+			Assert.AreEqual(parallelHits.Length(), singleHits.Length());
+			for (int i = 0; i < parallelHits.Length(); i++)
+			{
+				Assert.AreEqual(parallelHits.Score(i), singleHits.Score(i), 0.001f);
+				Lucene.Net.Documents.Document docParallel = parallelHits.Doc(i);
+				Lucene.Net.Documents.Document docSingle = singleHits.Doc(i);
+				Assert.AreEqual(docParallel.Get("f1"), docSingle.Get("f1"));
+				Assert.AreEqual(docParallel.Get("f2"), docSingle.Get("f2"));
+				Assert.AreEqual(docParallel.Get("f3"), docSingle.Get("f3"));
+				Assert.AreEqual(docParallel.Get("f4"), docSingle.Get("f4"));
+			}
+		}
+		
+		// Fields 1-4 indexed together:
+		private Searcher Single()
+		{
+			Directory dir = new RAMDirectory();
+			IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true);
+			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+			d1.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			d1.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			d1.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			d1.Add(new Field("f4", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			w.AddDocument(d1);
+			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+			d2.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			d2.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			d2.Add(new Field("f3", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			d2.Add(new Field("f4", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			w.AddDocument(d2);
+			w.Close();
+			
+			return new IndexSearcher(dir);
+		}
+		
+		// Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
+		private Searcher Parallel()
+		{
+			Directory dir1 = GetDir1();
+			Directory dir2 = GetDir2();
+			ParallelReader pr = new ParallelReader();
+			pr.Add(IndexReader.Open(dir1));
+			pr.Add(IndexReader.Open(dir2));
+			return new IndexSearcher(pr);
+		}
+		
+		private Directory GetDir1()
+		{
+			Directory dir1 = new RAMDirectory();
+			IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(), true);
+			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+			d1.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			d1.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			w1.AddDocument(d1);
+			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+			d2.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			d2.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			w1.AddDocument(d2);
+			w1.Close();
+			return dir1;
+		}
+		
+		private Directory GetDir2()
+		{
+			Directory dir2 = new RAMDirectory();
+			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true);
+			Lucene.Net.Documents.Document d3 = new Lucene.Net.Documents.Document();
+			d3.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			d3.Add(new Field("f4", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+			w2.AddDocument(d3);
+			Lucene.Net.Documents.Document d4 = new Lucene.Net.Documents.Document();
+			d4.Add(new Field("f3", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			d4.Add(new Field("f4", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+			w2.AddDocument(d4);
+			w2.Close();
+			return dir2;
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentMerger.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentMerger.cs Sat Jun  3 19:41:13 2006
@@ -13,13 +13,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using Document = Lucene.Net.Documents.Document;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
 	public class TestSegmentMerger
 	{
@@ -28,107 +31,88 @@
 		private System.String mergedSegment = "test";
 		//First segment to be merged
 		private Directory merge1Dir = new RAMDirectory();
-		private Document doc1 = new Document();
+		private Lucene.Net.Documents.Document doc1 = new Lucene.Net.Documents.Document();
 		private System.String merge1Segment = "test-1";
 		private SegmentReader reader1 = null;
 		//Second Segment to be merged
 		private Directory merge2Dir = new RAMDirectory();
-		private Document doc2 = new Document();
+		private Lucene.Net.Documents.Document doc2 = new Lucene.Net.Documents.Document();
 		private System.String merge2Segment = "test-2";
 		private SegmentReader reader2 = null;
 		
-        [TestFixtureSetUp]
-		protected virtual void  SetUp()
+		
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
 		{
 			DocHelper.SetupDoc(doc1);
 			DocHelper.WriteDoc(merge1Dir, merge1Segment, doc1);
 			DocHelper.SetupDoc(doc2);
 			DocHelper.WriteDoc(merge2Dir, merge2Segment, doc2);
-			try
-			{
-				reader1 = new SegmentReader(new SegmentInfo(merge1Segment, 1, merge1Dir));
-				reader2 = new SegmentReader(new SegmentInfo(merge2Segment, 1, merge2Dir));
-			}
-			catch (System.IO.IOException e)
-			{
-                System.Console.Error.WriteLine(e.StackTrace);
-			}
-		}
-		
-        [TestFixtureTearDown]
-		protected virtual void  TearDown()
-		{
-			
+			reader1 = SegmentReader.Get(new SegmentInfo(merge1Segment, 1, merge1Dir));
+			reader2 = SegmentReader.Get(new SegmentInfo(merge2Segment, 1, merge2Dir));
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
-            Assert.IsTrue(mergedDir != null);
+			Assert.IsTrue(mergedDir != null);
 			Assert.IsTrue(merge1Dir != null);
 			Assert.IsTrue(merge2Dir != null);
 			Assert.IsTrue(reader1 != null);
 			Assert.IsTrue(reader2 != null);
 		}
 		
-        [Test]
-		public virtual void  TestMerge()
+		[Test]
+        public virtual void  TestMerge()
 		{
-			//System.out.println("----------------TestMerge------------------");
-			SegmentMerger merger = new SegmentMerger(mergedDir, mergedSegment, false);
+			SegmentMerger merger = new SegmentMerger(mergedDir, mergedSegment);
 			merger.Add(reader1);
 			merger.Add(reader2);
-			try
-			{
-				int docsMerged = merger.Merge();
-				merger.CloseReaders();
-				Assert.IsTrue(docsMerged == 2);
-				//Should be able to open a new SegmentReader against the new directory
-				SegmentReader mergedReader = new SegmentReader(new SegmentInfo(mergedSegment, docsMerged, mergedDir));
-				Assert.IsTrue(mergedReader != null);
-				Assert.IsTrue(mergedReader.NumDocs() == 2);
-				Document newDoc1 = mergedReader.Document(0);
-				Assert.IsTrue(newDoc1 != null);
-				//There are 2 unstored fields on the document
-				Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - 2);
-				Document newDoc2 = mergedReader.Document(1);
-				Assert.IsTrue(newDoc2 != null);
-				Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - 2);
-				
-				TermDocs termDocs = mergedReader.TermDocs(new Term(DocHelper.TEXT_FIELD_2_KEY, "Field"));
-				Assert.IsTrue(termDocs != null);
-				Assert.IsTrue(termDocs.Next() == true);
-				
-				System.Collections.ICollection stored = mergedReader.GetIndexedFieldNames(true);
-				Assert.IsTrue(stored != null);
-				//System.out.println("stored size: " + stored.size());
-				Assert.IsTrue(stored.Count == 2);
-				
-				TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
-				Assert.IsTrue(vector != null);
-				System.String[] terms = vector.GetTerms();
-				Assert.IsTrue(terms != null);
-				//System.out.println("Terms size: " + terms.length);
-				Assert.IsTrue(terms.Length == 3);
-				int[] freqs = vector.GetTermFrequencies();
-				Assert.IsTrue(freqs != null);
-				//System.out.println("Freqs size: " + freqs.length);
-				
-				for (int i = 0; i < terms.Length; i++)
-				{
-					System.String term = terms[i];
-					int freq = freqs[i];
-					//System.out.println("Term: " + term + " Freq: " + freq);
-					Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != - 1);
-					Assert.IsTrue(DocHelper.FIELD_2_FREQS[i] == freq);
-				}
-			}
-			catch (System.IO.IOException e)
+			int docsMerged = merger.Merge();
+			merger.CloseReaders();
+			Assert.IsTrue(docsMerged == 2);
+			//Should be able to open a new SegmentReader against the new directory
+			SegmentReader mergedReader = SegmentReader.Get(new SegmentInfo(mergedSegment, docsMerged, mergedDir));
+			Assert.IsTrue(mergedReader != null);
+			Assert.IsTrue(mergedReader.NumDocs() == 2);
+			Lucene.Net.Documents.Document newDoc1 = mergedReader.Document(0);
+			Assert.IsTrue(newDoc1 != null);
+			//There are 2 unstored fields on the document
+			Assert.IsTrue(DocHelper.NumFields(newDoc1) == DocHelper.NumFields(doc1) - DocHelper.unstored.Count);
+			Lucene.Net.Documents.Document newDoc2 = mergedReader.Document(1);
+			Assert.IsTrue(newDoc2 != null);
+			Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.unstored.Count);
+			
+			TermDocs termDocs = mergedReader.TermDocs(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));
+			Assert.IsTrue(termDocs != null);
+			Assert.IsTrue(termDocs.Next() == true);
+			
+			System.Collections.ICollection stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
+			Assert.IsTrue(stored != null);
+			//System.out.println("stored size: " + stored.size());
+			Assert.IsTrue(stored.Count == 2);
+			
+			TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
+			Assert.IsTrue(vector != null);
+			System.String[] terms = vector.GetTerms();
+			Assert.IsTrue(terms != null);
+			//System.out.println("Terms size: " + terms.length);
+			Assert.IsTrue(terms.Length == 3);
+			int[] freqs = vector.GetTermFrequencies();
+			Assert.IsTrue(freqs != null);
+			//System.out.println("Freqs size: " + freqs.length);
+			Assert.IsTrue(vector is TermPositionVector == true);
+			
+			for (int i = 0; i < terms.Length; i++)
 			{
-                System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+				System.String term = terms[i];
+				int freq = freqs[i];
+				//System.out.println("Term: " + term + " Freq: " + freq);
+				Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != - 1);
+				Assert.IsTrue(DocHelper.FIELD_2_FREQS[i] == freq);
 			}
-			//System.out.println("---------------------end TestMerge-------------------");
+			
+			TestSegmentReader.CheckNorms(mergedReader);
 		}
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentReader.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentReader.cs Sat Jun  3 19:41:13 2006
@@ -13,190 +13,157 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
 	public class TestSegmentReader
 	{
 		private RAMDirectory dir = new RAMDirectory();
-		private Document testDoc = new Document();
+		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
 		private SegmentReader reader = null;
 		
 		//TODO: Setup the reader w/ multiple documents
-        [TestFixtureSetUp]
-		protected virtual void  SetUp()
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
 		{
-			
-			try
-			{
-				DocHelper.SetupDoc(testDoc);
-				DocHelper.WriteDoc(dir, testDoc);
-				reader = new SegmentReader(new SegmentInfo("test", 1, dir));
-			}
-			catch (System.IO.IOException e)
-			{
-				
-			}
+			DocHelper.SetupDoc(testDoc);
+			DocHelper.WriteDoc(dir, testDoc);
+			reader = SegmentReader.Get(new SegmentInfo("test", 1, dir));
 		}
 		
-        [TestFixtureTearDown]
-		protected virtual void  TearDown()
+		[TestFixtureTearDown]
+        public virtual void  TearDown()
 		{
 			
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
 			Assert.IsTrue(dir != null);
 			Assert.IsTrue(reader != null);
 			Assert.IsTrue(DocHelper.nameValues.Count > 0);
-			Assert.IsTrue(DocHelper.NumFields(testDoc) == 6);
+			Assert.IsTrue(DocHelper.NumFields(testDoc) == DocHelper.all.Count);
 		}
 		
-        [Test]
-		public virtual void  TestDocument()
+		[Test]
+        public virtual void  TestDocument()
 		{
-			try
+			Assert.IsTrue(reader.NumDocs() == 1);
+			Assert.IsTrue(reader.MaxDoc() >= 1);
+			Lucene.Net.Documents.Document result = reader.Document(0);
+			Assert.IsTrue(result != null);
+			//There are 2 unstored fields on the document that are not preserved across writing
+			Assert.IsTrue(DocHelper.NumFields(result) == DocHelper.NumFields(testDoc) - DocHelper.unstored.Count);
+			
+			System.Collections.IEnumerator fields = result.Fields();
+			while (fields.MoveNext())
 			{
-				Assert.IsTrue(reader.NumDocs() == 1);
-				Assert.IsTrue(reader.MaxDoc() >= 1);
-				Document result = reader.Document(0);
-				Assert.IsTrue(result != null);
-				//There are 2 unstored fields on the document that are not preserved across writing
-				Assert.IsTrue(DocHelper.NumFields(result) == DocHelper.NumFields(testDoc) - 2);
-				
-                foreach (Field field in result.Fields())
-                {
-                    Assert.IsTrue(field != null);
-                    Assert.IsTrue(DocHelper.nameValues.Contains(field.Name()));
-                }
-			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+				Field field = (Field) fields.Current;
+				Assert.IsTrue(field != null);
+				Assert.IsTrue(DocHelper.nameValues.Contains(field.Name()));
 			}
 		}
 		
-        [Test]
-		public virtual void  TestDelete()
+		[Test]
+        public virtual void  TestDelete()
 		{
-			Document docToDelete = new Document();
+			Lucene.Net.Documents.Document docToDelete = new Lucene.Net.Documents.Document();
 			DocHelper.SetupDoc(docToDelete);
 			DocHelper.WriteDoc(dir, "seg-to-delete", docToDelete);
+			SegmentReader deleteReader = SegmentReader.Get(new SegmentInfo("seg-to-delete", 1, dir));
+			Assert.IsTrue(deleteReader != null);
+			Assert.IsTrue(deleteReader.NumDocs() == 1);
+			deleteReader.Delete(0);
+			Assert.IsTrue(deleteReader.IsDeleted(0) == true);
+			Assert.IsTrue(deleteReader.HasDeletions() == true);
+			Assert.IsTrue(deleteReader.NumDocs() == 0);
 			try
 			{
-				SegmentReader deleteReader = new SegmentReader(new SegmentInfo("seg-to-delete", 1, dir));
-				Assert.IsTrue(deleteReader != null);
-				Assert.IsTrue(deleteReader.NumDocs() == 1);
-				deleteReader.Delete(0);
-				Assert.IsTrue(deleteReader.IsDeleted(0) == true);
-				Assert.IsTrue(deleteReader.HasDeletions() == true);
-				Assert.IsTrue(deleteReader.NumDocs() == 0);
-				try
-				{
-					Document test = deleteReader.Document(0);
-					Assert.IsTrue(false);
-				}
-				catch (System.ArgumentException e)
-				{
-					Assert.IsTrue(true);
-				}
+				deleteReader.Document(0);
+				Assert.Fail();
 			}
-			catch (System.IO.IOException e)
+			catch (System.ArgumentException e)
 			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+				// expected exception
 			}
 		}
 		
-        [Test]
-		public virtual void  TestGetFieldNameVariations()
+		[Test]
+        public virtual void  TestGetFieldNameVariations()
 		{
-			try
-			{
-				System.Collections.ICollection result = reader.GetFieldNames();
-				Assert.IsTrue(result != null);
-				Assert.IsTrue(result.Count == 7);
-				for (System.Collections.IEnumerator iter = result.GetEnumerator(); iter.MoveNext(); )
-				{
-                    System.Collections.DictionaryEntry fi = (System.Collections.DictionaryEntry) iter.Current;
-                    System.String s = fi.Key.ToString();
-					//System.out.println("Name: " + s);
-					Assert.IsTrue(DocHelper.nameValues.Contains(s) == true || s.Equals(""));
-				}
-				result = reader.GetFieldNames(true);
-				Assert.IsTrue(result != null);
-				//      System.out.println("Size: " + result.size());
-				Assert.IsTrue(result.Count == 5);
-				for (System.Collections.IEnumerator iter = result.GetEnumerator(); iter.MoveNext(); )
-				{
-                    System.Collections.DictionaryEntry fi = (System.Collections.DictionaryEntry) iter.Current;
-                    System.String s = fi.Key.ToString();
-                    Assert.IsTrue(DocHelper.nameValues.Contains(s) == true || s.Equals(""));
-				}
-				
-				result = reader.GetFieldNames(false);
-				Assert.IsTrue(result != null);
-				Assert.IsTrue(result.Count == 2);
-				//Get all indexed fields that are storing term vectors
-				result = reader.GetIndexedFieldNames(true);
-				Assert.IsTrue(result != null);
-				Assert.IsTrue(result.Count == 2);
-				
-				result = reader.GetIndexedFieldNames(false);
-				Assert.IsTrue(result != null);
-				Assert.IsTrue(result.Count == 3);
-			}
-			catch (System.IO.IOException e)
+			System.Collections.ICollection result = reader.GetFieldNames(IndexReader.FieldOption.ALL);
+			Assert.IsTrue(result != null);
+			Assert.IsTrue(result.Count == DocHelper.all.Count);
+			for (System.Collections.IEnumerator iter = result.GetEnumerator(); iter.MoveNext(); )
 			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+				System.String s = (System.String) iter.Current;
+				//System.out.println("Name: " + s);
+				Assert.IsTrue(DocHelper.nameValues.Contains(s) == true || s.Equals(""));
 			}
-		}
-		
-        [Test]
-		public virtual void  TestTerms()
-		{
-			try
-			{
-				TermEnum terms = reader.Terms();
-				Assert.IsTrue(terms != null);
-				while (terms.Next() == true)
-				{
-					Term term = terms.Term();
-					Assert.IsTrue(term != null);
-					//System.out.println("Term: " + term);
-					System.String fieldValue = (System.String) DocHelper.nameValues[term.Field()];
-					Assert.IsTrue(fieldValue.IndexOf(term.Text()) != - 1);
-				}
-				
-				TermDocs termDocs = reader.TermDocs();
-				Assert.IsTrue(termDocs != null);
-				termDocs.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "Field"));
-				Assert.IsTrue(termDocs.Next() == true);
-				
-				TermPositions positions = reader.TermPositions();
-				positions.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "Field"));
-				Assert.IsTrue(positions != null);
-				Assert.IsTrue(positions.Doc() == 0);
-				Assert.IsTrue(positions.NextPosition() >= 0);
-			}
-			catch (System.IO.IOException e)
+			result = reader.GetFieldNames(IndexReader.FieldOption.INDEXED);
+			Assert.IsTrue(result != null);
+			Assert.IsTrue(result.Count == DocHelper.indexed.Count);
+			for (System.Collections.IEnumerator iter = result.GetEnumerator(); iter.MoveNext(); )
 			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+				System.String s = (System.String) iter.Current;
+				Assert.IsTrue(DocHelper.indexed.Contains(s) == true || s.Equals(""));
 			}
+			
+			result = reader.GetFieldNames(IndexReader.FieldOption.UNINDEXED);
+			Assert.IsTrue(result != null);
+			Assert.IsTrue(result.Count == DocHelper.unindexed.Count);
+			//Get all indexed fields that are storing term vectors
+			result = reader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
+			Assert.IsTrue(result != null);
+			Assert.IsTrue(result.Count == DocHelper.termvector.Count);
+			
+			result = reader.GetFieldNames(IndexReader.FieldOption.INDEXED_NO_TERMVECTOR);
+			Assert.IsTrue(result != null);
+			Assert.IsTrue(result.Count == DocHelper.notermvector.Count);
+		}
+		
+		[Test]
+        public virtual void  TestTerms()
+		{
+			TermEnum terms = reader.Terms();
+			Assert.IsTrue(terms != null);
+			while (terms.Next() == true)
+			{
+				Term term = terms.Term();
+				Assert.IsTrue(term != null);
+				//System.out.println("Term: " + term);
+				System.String fieldValue = (System.String) DocHelper.nameValues[term.Field()];
+				Assert.IsTrue(fieldValue.IndexOf(term.Text()) != - 1);
+			}
+			
+			TermDocs termDocs = reader.TermDocs();
+			Assert.IsTrue(termDocs != null);
+			termDocs.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "field"));
+			Assert.IsTrue(termDocs.Next() == true);
+			
+			termDocs.Seek(new Term(DocHelper.NO_NORMS_KEY, DocHelper.NO_NORMS_TEXT));
+			Assert.IsTrue(termDocs.Next() == true);
+			
+			
+			TermPositions positions = reader.TermPositions();
+			positions.Seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "field"));
+			Assert.IsTrue(positions != null);
+			Assert.IsTrue(positions.Doc() == 0);
+			Assert.IsTrue(positions.NextPosition() >= 0);
 		}
 		
-        [Test]
-		public virtual void  TestNorms()
+		[Test]
+        public virtual void  TestNorms()
 		{
 			//TODO: Not sure how these work/should be tested
 			/*
@@ -208,35 +175,58 @@
 			e.printStackTrace();
 			Assert.IsTrue(false);
 			}*/
+			
+			CheckNorms(reader);
 		}
 		
-        [Test]
-		public virtual void  TestTermVectors()
+		public static void  CheckNorms(IndexReader reader)
 		{
-			try
+			// test omit norms
+			for (int i = 0; i < DocHelper.fields.Length; i++)
 			{
-				TermFreqVector result = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
-				Assert.IsTrue(result != null);
-				System.String[] terms = result.GetTerms();
-				int[] freqs = result.GetTermFrequencies();
-				Assert.IsTrue(terms != null && terms.Length == 3 && freqs != null && freqs.Length == 3);
-				for (int i = 0; i < terms.Length; i++)
-				{
-					System.String term = terms[i];
-					int freq = freqs[i];
-					Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != - 1);
-					Assert.IsTrue(freq > 0);
+				Field f = DocHelper.fields[i];
+				if (f.IsIndexed())
+				{
+					Assert.AreEqual(reader.HasNorms(f.Name()), !f.GetOmitNorms());
+					Assert.AreEqual(reader.HasNorms(f.Name()), !DocHelper.noNorms.Contains(f.Name()));
+					if (!reader.HasNorms(f.Name()))
+					{
+						// test for fake norms of 1.0
+						byte[] norms = reader.Norms(f.Name());
+						Assert.AreEqual(norms.Length, reader.MaxDoc());
+						for (int j = 0; j < reader.MaxDoc(); j++)
+						{
+							Assert.AreEqual(norms[j], DefaultSimilarity.EncodeNorm(1.0f));
+						}
+						norms = new byte[reader.MaxDoc()];
+						reader.Norms(f.Name(), norms, 0);
+						for (int j = 0; j < reader.MaxDoc(); j++)
+						{
+							Assert.AreEqual(norms[j], DefaultSimilarity.EncodeNorm(1.0f));
+						}
+					}
 				}
-				
-				TermFreqVector[] results = reader.GetTermFreqVectors(0);
-				Assert.IsTrue(results != null);
-				Assert.IsTrue(results.Length == 2);
 			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+		}
+		
+		public virtual void  TestTermVectors()
+		{
+			TermFreqVector result = reader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
+			Assert.IsTrue(result != null);
+			System.String[] terms = result.GetTerms();
+			int[] freqs = result.GetTermFrequencies();
+			Assert.IsTrue(terms != null && terms.Length == 3 && freqs != null && freqs.Length == 3);
+			for (int i = 0; i < terms.Length; i++)
+			{
+				System.String term = terms[i];
+				int freq = freqs[i];
+				Assert.IsTrue(DocHelper.FIELD_2_TEXT.IndexOf(term) != - 1);
+				Assert.IsTrue(freq > 0);
 			}
+			
+			TermFreqVector[] results = reader.GetTermFreqVectors(0);
+			Assert.IsTrue(results != null);
+			Assert.IsTrue(results.Length == 2);
 		}
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentTermDocs.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermDocs.cs Sat Jun  3 19:41:13 2006
@@ -13,75 +13,69 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Similarity = Lucene.Net.Search.Similarity;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
 	public class TestSegmentTermDocs
 	{
-		private Document testDoc = new Document();
+		private Lucene.Net.Documents.Document testDoc = new Lucene.Net.Documents.Document();
 		private Directory dir = new RAMDirectory();
 		
-        [TestFixtureSetUp]
-		protected virtual void  SetUp()
+		
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
 		{
 			DocHelper.SetupDoc(testDoc);
 			DocHelper.WriteDoc(dir, testDoc);
 		}
 		
 		[TestFixtureTearDown]
-		protected virtual void  TearDown()
+		public virtual void  TearDown()
 		{
 			
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
 			Assert.IsTrue(dir != null);
 		}
 		
-        [Test]
-		public virtual void  TestTermDocs()
+		[Test]
+        public virtual void  TestTermDocs()
 		{
-			try
-			{
-				//After adding the document, we should be able to read it back in
-				SegmentReader reader = new SegmentReader(new SegmentInfo("test", 1, dir));
-				Assert.IsTrue(reader != null);
-				SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
-				Assert.IsTrue(segTermDocs != null);
-				segTermDocs.Seek(new Term(DocHelper.TEXT_FIELD_2_KEY, "Field"));
-				if (segTermDocs.Next() == true)
-				{
-					int docId = segTermDocs.Doc();
-					Assert.IsTrue(docId == 0);
-					int freq = segTermDocs.Freq();
-					Assert.IsTrue(freq == 3);
-				}
-				reader.Close();
-			}
-			catch (System.IO.IOException e)
-			{
-				Assert.IsTrue(false);
+			//After adding the document, we should be able to read it back in
+			SegmentReader reader = SegmentReader.Get(new SegmentInfo("test", 1, dir));
+			Assert.IsTrue(reader != null);
+			SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
+			Assert.IsTrue(segTermDocs != null);
+			segTermDocs.Seek(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));
+			if (segTermDocs.Next() == true)
+			{
+				int docId = segTermDocs.Doc();
+				Assert.IsTrue(docId == 0);
+				int freq = segTermDocs.Freq();
+				Assert.IsTrue(freq == 3);
 			}
+			reader.Close();
 		}
 		
-        [Test]
-		public virtual void  TestBadSeek()
+		[Test]
+        public virtual void  TestBadSeek()
 		{
-			try
 			{
 				//After adding the document, we should be able to read it back in
-				SegmentReader reader = new SegmentReader(new SegmentInfo("test", 3, dir));
+				SegmentReader reader = SegmentReader.Get(new SegmentInfo("test", 3, dir));
 				Assert.IsTrue(reader != null);
 				SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
 				Assert.IsTrue(segTermDocs != null);
@@ -89,14 +83,9 @@
 				Assert.IsTrue(segTermDocs.Next() == false);
 				reader.Close();
 			}
-			catch (System.IO.IOException e)
-			{
-				Assert.IsTrue(false);
-			}
-			try
 			{
 				//After adding the document, we should be able to read it back in
-				SegmentReader reader = new SegmentReader(new SegmentInfo("test", 3, dir));
+				SegmentReader reader = SegmentReader.Get(new SegmentInfo("test", 3, dir));
 				Assert.IsTrue(reader != null);
 				SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
 				Assert.IsTrue(segTermDocs != null);
@@ -104,149 +93,138 @@
 				Assert.IsTrue(segTermDocs.Next() == false);
 				reader.Close();
 			}
-			catch (System.IO.IOException e)
-			{
-				Assert.IsTrue(false);
-			}
 		}
 		
-        [Test]
-		public virtual void  TestSkipTo()
+		[Test]
+        public virtual void  TestSkipTo()
 		{
-			try
-			{
-				Directory dir = new RAMDirectory();
-				IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-				
-				Term ta = new Term("content", "aaa");
-				for (int i = 0; i < 10; i++)
-					AddDoc(writer, "aaa aaa aaa aaa");
-				
-				Term tb = new Term("content", "bbb");
-				for (int i = 0; i < 16; i++)
-					AddDoc(writer, "bbb bbb bbb bbb");
-				
-				Term tc = new Term("content", "ccc");
-				for (int i = 0; i < 50; i++)
-					AddDoc(writer, "ccc ccc ccc ccc");
-				
-				// assure that we deal with a single segment  
-				writer.Optimize();
-				writer.Close();
-				
-				IndexReader reader = IndexReader.Open(dir);
-				TermDocs tdocs = reader.TermDocs();
-				
-				// without optimization (assumption skipInterval == 16)
-				
-				// with next
-				tdocs.Seek(ta);
-				Assert.IsTrue(tdocs.Next());
-				Assert.AreEqual(0, tdocs.Doc());
-				Assert.AreEqual(4, tdocs.Freq());
-				Assert.IsTrue(tdocs.Next());
-				Assert.AreEqual(1, tdocs.Doc());
-				Assert.AreEqual(4, tdocs.Freq());
-				Assert.IsTrue(tdocs.SkipTo(0));
-				Assert.AreEqual(2, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(4));
-				Assert.AreEqual(4, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(9));
-				Assert.AreEqual(9, tdocs.Doc());
-				Assert.IsFalse(tdocs.SkipTo(10));
-				
-				// without next
-				tdocs.Seek(ta);
-				Assert.IsTrue(tdocs.SkipTo(0));
-				Assert.AreEqual(0, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(4));
-				Assert.AreEqual(4, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(9));
-				Assert.AreEqual(9, tdocs.Doc());
-				Assert.IsFalse(tdocs.SkipTo(10));
-				
-				// exactly skipInterval documents and therefore with optimization
-				
-				// with next
-				tdocs.Seek(tb);
-				Assert.IsTrue(tdocs.Next());
-				Assert.AreEqual(10, tdocs.Doc());
-				Assert.AreEqual(4, tdocs.Freq());
-				Assert.IsTrue(tdocs.Next());
-				Assert.AreEqual(11, tdocs.Doc());
-				Assert.AreEqual(4, tdocs.Freq());
-				Assert.IsTrue(tdocs.SkipTo(5));
-				Assert.AreEqual(12, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(15));
-				Assert.AreEqual(15, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(24));
-				Assert.AreEqual(24, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(25));
-				Assert.AreEqual(25, tdocs.Doc());
-				Assert.IsFalse(tdocs.SkipTo(26));
-				
-				// without next
-				tdocs.Seek(tb);
-				Assert.IsTrue(tdocs.SkipTo(5));
-				Assert.AreEqual(10, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(15));
-				Assert.AreEqual(15, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(24));
-				Assert.AreEqual(24, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(25));
-				Assert.AreEqual(25, tdocs.Doc());
-				Assert.IsFalse(tdocs.SkipTo(26));
-				
-				// much more than skipInterval documents and therefore with optimization
-				
-				// with next
-				tdocs.Seek(tc);
-				Assert.IsTrue(tdocs.Next());
-				Assert.AreEqual(26, tdocs.Doc());
-				Assert.AreEqual(4, tdocs.Freq());
-				Assert.IsTrue(tdocs.Next());
-				Assert.AreEqual(27, tdocs.Doc());
-				Assert.AreEqual(4, tdocs.Freq());
-				Assert.IsTrue(tdocs.SkipTo(5));
-				Assert.AreEqual(28, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(40));
-				Assert.AreEqual(40, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(57));
-				Assert.AreEqual(57, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(74));
-				Assert.AreEqual(74, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(75));
-				Assert.AreEqual(75, tdocs.Doc());
-				Assert.IsFalse(tdocs.SkipTo(76));
-				
-				//without next
-				tdocs.Seek(tc);
-				Assert.IsTrue(tdocs.SkipTo(5));
-				Assert.AreEqual(26, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(40));
-				Assert.AreEqual(40, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(57));
-				Assert.AreEqual(57, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(74));
-				Assert.AreEqual(74, tdocs.Doc());
-				Assert.IsTrue(tdocs.SkipTo(75));
-				Assert.AreEqual(75, tdocs.Doc());
-				Assert.IsFalse(tdocs.SkipTo(76));
-				
-				tdocs.Close();
-				reader.Close();
-				dir.Close();
-			}
-			catch (System.IO.IOException e)
-			{
-				Assert.IsTrue(false);
-			}
+			Directory dir = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+			
+			Term ta = new Term("content", "aaa");
+			for (int i = 0; i < 10; i++)
+				AddDoc(writer, "aaa aaa aaa aaa");
+			
+			Term tb = new Term("content", "bbb");
+			for (int i = 0; i < 16; i++)
+				AddDoc(writer, "bbb bbb bbb bbb");
+			
+			Term tc = new Term("content", "ccc");
+			for (int i = 0; i < 50; i++)
+				AddDoc(writer, "ccc ccc ccc ccc");
+			
+			// ensure that we deal with a single segment
+			writer.Optimize();
+			writer.Close();
+			
+			IndexReader reader = IndexReader.Open(dir);
+			TermDocs tdocs = reader.TermDocs();
+			
+			// without optimization (assumption skipInterval == 16)
+			
+			// with next
+			tdocs.Seek(ta);
+			Assert.IsTrue(tdocs.Next());
+			Assert.AreEqual(0, tdocs.Doc());
+			Assert.AreEqual(4, tdocs.Freq());
+			Assert.IsTrue(tdocs.Next());
+			Assert.AreEqual(1, tdocs.Doc());
+			Assert.AreEqual(4, tdocs.Freq());
+			Assert.IsTrue(tdocs.SkipTo(0));
+			Assert.AreEqual(2, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(4));
+			Assert.AreEqual(4, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(9));
+			Assert.AreEqual(9, tdocs.Doc());
+			Assert.IsFalse(tdocs.SkipTo(10));
+			
+			// without next
+			tdocs.Seek(ta);
+			Assert.IsTrue(tdocs.SkipTo(0));
+			Assert.AreEqual(0, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(4));
+			Assert.AreEqual(4, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(9));
+			Assert.AreEqual(9, tdocs.Doc());
+			Assert.IsFalse(tdocs.SkipTo(10));
+			
+			// exactly skipInterval documents and therefore with optimization
+			
+			// with next
+			tdocs.Seek(tb);
+			Assert.IsTrue(tdocs.Next());
+			Assert.AreEqual(10, tdocs.Doc());
+			Assert.AreEqual(4, tdocs.Freq());
+			Assert.IsTrue(tdocs.Next());
+			Assert.AreEqual(11, tdocs.Doc());
+			Assert.AreEqual(4, tdocs.Freq());
+			Assert.IsTrue(tdocs.SkipTo(5));
+			Assert.AreEqual(12, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(15));
+			Assert.AreEqual(15, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(24));
+			Assert.AreEqual(24, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(25));
+			Assert.AreEqual(25, tdocs.Doc());
+			Assert.IsFalse(tdocs.SkipTo(26));
+			
+			// without next
+			tdocs.Seek(tb);
+			Assert.IsTrue(tdocs.SkipTo(5));
+			Assert.AreEqual(10, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(15));
+			Assert.AreEqual(15, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(24));
+			Assert.AreEqual(24, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(25));
+			Assert.AreEqual(25, tdocs.Doc());
+			Assert.IsFalse(tdocs.SkipTo(26));
+			
+			// much more than skipInterval documents and therefore with optimization
+			
+			// with next
+			tdocs.Seek(tc);
+			Assert.IsTrue(tdocs.Next());
+			Assert.AreEqual(26, tdocs.Doc());
+			Assert.AreEqual(4, tdocs.Freq());
+			Assert.IsTrue(tdocs.Next());
+			Assert.AreEqual(27, tdocs.Doc());
+			Assert.AreEqual(4, tdocs.Freq());
+			Assert.IsTrue(tdocs.SkipTo(5));
+			Assert.AreEqual(28, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(40));
+			Assert.AreEqual(40, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(57));
+			Assert.AreEqual(57, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(74));
+			Assert.AreEqual(74, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(75));
+			Assert.AreEqual(75, tdocs.Doc());
+			Assert.IsFalse(tdocs.SkipTo(76));
+			
+			//without next
+			tdocs.Seek(tc);
+			Assert.IsTrue(tdocs.SkipTo(5));
+			Assert.AreEqual(26, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(40));
+			Assert.AreEqual(40, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(57));
+			Assert.AreEqual(57, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(74));
+			Assert.AreEqual(74, tdocs.Doc());
+			Assert.IsTrue(tdocs.SkipTo(75));
+			Assert.AreEqual(75, tdocs.Doc());
+			Assert.IsFalse(tdocs.SkipTo(76));
+			
+			tdocs.Close();
+			reader.Close();
+			dir.Close();
 		}
 		
 		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
 		{
-			Document doc = new Document();
-			doc.Add(Field.UnStored("content", value_Renamed));
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.TOKENIZED));
 			writer.AddDocument(doc);
 		}
 	}

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestSegmentTermEnum.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestSegmentTermEnum.cs Sat Jun  3 19:41:13 2006
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -20,6 +21,7 @@
 using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
 	
@@ -30,48 +32,34 @@
 	{
 		internal Directory dir = new RAMDirectory();
 		
-        [Test]
-		public virtual void  TestTermEnum()
+		[Test]
+        public virtual void  TestTermEnum()
 		{
 			IndexWriter writer = null;
 			
-			try
-			{
-				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
-				
-				// add 100 documents with term : aaa
-				// add 100 documents with terms: aaa bbb
-				// Therefore, term 'aaa' has document frequency of 200 and term 'bbb' 100
-				for (int i = 0; i < 100; i++)
-				{
-					AddDoc(writer, "aaa");
-					AddDoc(writer, "aaa bbb");
-				}
-				
-				writer.Close();
-			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine(e.StackTrace);
-			}
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 			
-			try
+			// add 100 documents with term : aaa
+			// add 100 documents with terms: aaa bbb
+			// Therefore, term 'aaa' has document frequency of 200 and term 'bbb' 100
+			for (int i = 0; i < 100; i++)
 			{
-				// verify document frequency of terms in an unoptimized index
-				VerifyDocFreq();
-				
-				// merge segments by optimizing the index
-				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
-				writer.Optimize();
-				writer.Close();
-				
-				// verify document frequency of terms in an optimized index
-				VerifyDocFreq();
-			}
-			catch (System.IO.IOException e2)
-			{
-				System.Console.Error.WriteLine(e2.StackTrace);
+				AddDoc(writer, "aaa");
+				AddDoc(writer, "aaa bbb");
 			}
+			
+			writer.Close();
+			
+			// verify document frequency of terms in an unoptimized index
+			VerifyDocFreq();
+			
+			// merge segments by optimizing the index
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+			writer.Optimize();
+			writer.Close();
+			
+			// verify document frequency of terms in an optimized index
+			VerifyDocFreq();
 		}
 		
 		private void  VerifyDocFreq()
@@ -111,17 +99,9 @@
 		
 		private void  AddDoc(IndexWriter writer, System.String value_Renamed)
 		{
-			Document doc = new Document();
-			doc.Add(Field.UnStored("content", value_Renamed));
-			
-			try
-			{
-				writer.AddDocument(doc);
-			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine(e.StackTrace);
-			}
+			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			doc.Add(new Field("content", value_Renamed, Field.Store.NO, Field.Index.TOKENIZED));
+			writer.AddDocument(doc);
 		}
 	}
 }

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermVectorsReader.cs?rev=411501&r1=411500&r2=411501&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs Sat Jun  3 19:41:13 2006
@@ -13,99 +13,203 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 using System;
 using NUnit.Framework;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
 namespace Lucene.Net.Index
 {
 	[TestFixture]
 	public class TestTermVectorsReader
 	{
+		private void  InitBlock()
+		{
+			positions = new int[testTerms.Length][];
+			offsets = new TermVectorOffsetInfo[testTerms.Length][];
+		}
 		private TermVectorsWriter writer = null;
 		//Must be lexicographically sorted, will do in setup, versus trying to maintain here
 		private System.String[] testFields = new System.String[]{"f1", "f2", "f3"};
+		private bool[] testFieldsStorePos = new bool[]{true, false, true, false};
+		private bool[] testFieldsStoreOff = new bool[]{true, false, false, true};
 		private System.String[] testTerms = new System.String[]{"this", "is", "a", "test"};
+		private int[][] positions;
+		private TermVectorOffsetInfo[][] offsets;
 		private RAMDirectory dir = new RAMDirectory();
 		private System.String seg = "testSegment";
 		private FieldInfos fieldInfos = new FieldInfos();
 		
-        [TestFixtureSetUp]
-		protected virtual void  SetUp()
+        public TestTermVectorsReader()
+        {
+            InitBlock();
+        }
+
+        public TestTermVectorsReader(System.String s)
+		{
+			InitBlock();
+		}
+		
+		[TestFixtureSetUp]
+        public virtual void  SetUp()
 		{
 			for (int i = 0; i < testFields.Length; i++)
 			{
-				fieldInfos.Add(testFields[i], true, true);
+				fieldInfos.Add(testFields[i], true, true, testFieldsStorePos[i], testFieldsStoreOff[i]);
 			}
 			
-			try
+			for (int i = 0; i < testTerms.Length; i++)
 			{
-				System.Array.Sort(testTerms);
-				for (int j = 0; j < 5; j++)
+				positions[i] = new int[3];
+				for (int j = 0; j < positions[i].Length; j++)
 				{
-					writer = new TermVectorsWriter(dir, seg, fieldInfos);
-					writer.OpenDocument();
-					
-					for (int k = 0; k < testFields.Length; k++)
-					{
-						writer.OpenField(testFields[k]);
-						for (int i = 0; i < testTerms.Length; i++)
-						{
-							writer.AddTerm(testTerms[i], i);
-						}
-						writer.CloseField();
-					}
-					writer.CloseDocument();
-					writer.Close();
+					// positions are always sorted in increasing order
+					positions[i][j] = (int) (j * 10 + (new System.Random().NextDouble()) * 10);
+				}
+				offsets[i] = new TermVectorOffsetInfo[3];
+				for (int j = 0; j < offsets[i].Length; j++)
+				{
+					// offsets are always sorted in increasing order
+					offsets[i][j] = new TermVectorOffsetInfo(j * 10, j * 10 + testTerms[i].Length);
 				}
 			}
-			catch (System.IO.IOException e)
+			System.Array.Sort(testTerms);
+			for (int j = 0; j < 5; j++)
 			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+				writer = new TermVectorsWriter(dir, seg, fieldInfos);
+				writer.OpenDocument();
+				
+				for (int k = 0; k < testFields.Length; k++)
+				{
+					writer.OpenField(testFields[k]);
+					for (int i = 0; i < testTerms.Length; i++)
+					{
+						writer.AddTerm(testTerms[i], 3, positions[i], offsets[i]);
+					}
+					writer.CloseField();
+				}
+				writer.CloseDocument();
+				writer.Close();
 			}
 		}
 		
-        [TestFixtureTearDown]
-		protected virtual void  TearDown()
+		[TestFixtureTearDown]
+        public virtual void  TearDown()
 		{
 			
 		}
 		
-        [Test]
-		public virtual void  Test()
+		[Test]
+        public virtual void  Test()
 		{
 			//Check to see the files were created properly in setup
 			Assert.IsTrue(writer.IsDocumentOpen() == false);
-			Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TVD_EXTENSION));
-			Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TVX_EXTENSION));
+			Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvdExtension));
+			Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvxExtension));
 		}
 		
-        [Test]
-		public virtual void  TestReader()
+		[Test]
+        public virtual void  TestReader()
 		{
-			try
-			{
-				TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
-				Assert.IsTrue(reader != null);
-				TermFreqVector vector = reader.Get(0, testFields[0]);
-				Assert.IsTrue(vector != null);
-				System.String[] terms = vector.GetTerms();
-				Assert.IsTrue(terms != null);
-				Assert.IsTrue(terms.Length == testTerms.Length);
-				for (int i = 0; i < terms.Length; i++)
-				{
-					System.String term = terms[i];
-					//System.out.println("Term: " + term);
-					Assert.IsTrue(term.Equals(testTerms[i]));
+			TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+			Assert.IsTrue(reader != null);
+			TermFreqVector vector = reader.Get(0, testFields[0]);
+			Assert.IsTrue(vector != null);
+			System.String[] terms = vector.GetTerms();
+			Assert.IsTrue(terms != null);
+			Assert.IsTrue(terms.Length == testTerms.Length);
+			for (int i = 0; i < terms.Length; i++)
+			{
+				System.String term = terms[i];
+				//System.out.println("Term: " + term);
+				Assert.IsTrue(term.Equals(testTerms[i]));
+			}
+		}
+		
+		[Test]
+        public virtual void  TestPositionReader()
+		{
+			TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+			Assert.IsTrue(reader != null);
+			TermPositionVector vector;
+			System.String[] terms;
+			vector = (TermPositionVector) reader.Get(0, testFields[0]);
+			Assert.IsTrue(vector != null);
+			terms = vector.GetTerms();
+			Assert.IsTrue(terms != null);
+			Assert.IsTrue(terms.Length == testTerms.Length);
+			for (int i = 0; i < terms.Length; i++)
+			{
+				System.String term = terms[i];
+				//System.out.println("Term: " + term);
+				Assert.IsTrue(term.Equals(testTerms[i]));
+				int[] positions = vector.GetTermPositions(i);
+				Assert.IsTrue(positions != null);
+				Assert.IsTrue(positions.Length == this.positions[i].Length);
+				for (int j = 0; j < positions.Length; j++)
+				{
+					int position = positions[j];
+					Assert.IsTrue(position == this.positions[i][j]);
+				}
+				TermVectorOffsetInfo[] offset = vector.GetOffsets(i);
+				Assert.IsTrue(offset != null);
+				Assert.IsTrue(offset.Length == this.offsets[i].Length);
+				for (int j = 0; j < offset.Length; j++)
+				{
+					TermVectorOffsetInfo termVectorOffsetInfo = offset[j];
+					Assert.IsTrue(termVectorOffsetInfo.Equals(offsets[i][j]));
 				}
 			}
-			catch (System.IO.IOException e)
-			{
-				System.Console.Error.WriteLine(e.StackTrace);
-				Assert.IsTrue(false);
+			
+			TermFreqVector freqVector = reader.Get(0, testFields[1]); //no pos, no offset
+			Assert.IsTrue(freqVector != null);
+			Assert.IsTrue(freqVector is TermPositionVector == false);
+			terms = freqVector.GetTerms();
+			Assert.IsTrue(terms != null);
+			Assert.IsTrue(terms.Length == testTerms.Length);
+			for (int i = 0; i < terms.Length; i++)
+			{
+				System.String term = terms[i];
+				//System.out.println("Term: " + term);
+				Assert.IsTrue(term.Equals(testTerms[i]));
+			}
+		}
+		
+		[Test]
+        public virtual void  TestOffsetReader()
+		{
+			TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+			Assert.IsTrue(reader != null);
+			TermPositionVector vector = (TermPositionVector) reader.Get(0, testFields[0]);
+			Assert.IsTrue(vector != null);
+			System.String[] terms = vector.GetTerms();
+			Assert.IsTrue(terms != null);
+			Assert.IsTrue(terms.Length == testTerms.Length);
+			for (int i = 0; i < terms.Length; i++)
+			{
+				System.String term = terms[i];
+				//System.out.println("Term: " + term);
+				Assert.IsTrue(term.Equals(testTerms[i]));
+				int[] positions = vector.GetTermPositions(i);
+				Assert.IsTrue(positions != null);
+				Assert.IsTrue(positions.Length == this.positions[i].Length);
+				for (int j = 0; j < positions.Length; j++)
+				{
+					int position = positions[j];
+					Assert.IsTrue(position == this.positions[i][j]);
+				}
+				TermVectorOffsetInfo[] offset = vector.GetOffsets(i);
+				Assert.IsTrue(offset != null);
+				Assert.IsTrue(offset.Length == this.offsets[i].Length);
+				for (int j = 0; j < offset.Length; j++)
+				{
+					TermVectorOffsetInfo termVectorOffsetInfo = offset[j];
+					Assert.IsTrue(termVectorOffsetInfo.Equals(offsets[i][j]));
+				}
 			}
 		}
 		
+		
 		/// <summary> Make sure exceptions and bad params are handled appropriately</summary>
 		[Test]
         public virtual void  TestBadParams()
@@ -114,25 +218,37 @@
 			{
 				TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
 				Assert.IsTrue(reader != null);
-				//Bad document number, good Field number
-				TermFreqVector vector = reader.Get(50, testFields[0]);
-				Assert.IsTrue(vector == null);
+				//Bad document number, good field number
+				reader.Get(50, testFields[0]);
+				Assert.Fail();
 			}
-			catch (System.Exception e)
+			catch (System.IO.IOException e)
 			{
-				Assert.IsTrue(false);
+				// expected exception
 			}
 			try
 			{
 				TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
 				Assert.IsTrue(reader != null);
-				//good document number, bad Field number
+				//Bad document number, no field
+				reader.Get(50);
+				Assert.Fail();
+			}
+			catch (System.IO.IOException e)
+			{
+				// expected exception
+			}
+			try
+			{
+				TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+				Assert.IsTrue(reader != null);
+				//good document number, bad field number
 				TermFreqVector vector = reader.Get(0, "f50");
 				Assert.IsTrue(vector == null);
 			}
-			catch (System.Exception e)
+			catch (System.IO.IOException e)
 			{
-				Assert.IsTrue(false);
+				Assert.Fail();
 			}
 		}
 	}



Mime
View raw message