lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ccurr...@apache.org
Subject svn commit: r1299911 [11/14] - in /incubator/lucene.net/trunk: src/core/ src/core/Analysis/ src/core/Analysis/Standard/ src/core/Analysis/Tokenattributes/ src/core/Document/ src/core/Index/ src/core/Messages/ src/core/QueryParser/ src/core/Search/ src/...
Date Mon, 12 Mar 2012 22:29:37 GMT
Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterDelete.cs Mon Mar 12 22:29:26 2012
@@ -146,7 +146,7 @@ namespace Lucene.Net.Index
 
             Directory dir = new MockRAMDirectory();
             IndexWriter modifier = new IndexWriter(dir,  new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
-            modifier.SetUseCompoundFile(true);
+            modifier.UseCompoundFile = true;
             modifier.SetMaxBufferedDeleteTerms(1);
 
             for (int i = 0; i < keywords.Length; i++)
@@ -198,7 +198,7 @@ namespace Lucene.Net.Index
             modifier.Commit();
 
             IndexReader reader = IndexReader.Open(dir, true);
-            Assert.AreEqual(7, reader.NumDocs());
+            Assert.AreEqual(7, reader.NumDocs);
             reader.Close();
 
             modifier.DeleteDocuments(new Term("value", System.Convert.ToString(value_Renamed)));
@@ -206,7 +206,7 @@ namespace Lucene.Net.Index
             modifier.Commit();
 
             reader = IndexReader.Open(dir, true);
-            Assert.AreEqual(0, reader.NumDocs());
+            Assert.AreEqual(0, reader.NumDocs);
             reader.Close();
             modifier.Close();
             dir.Close();
@@ -263,7 +263,7 @@ namespace Lucene.Net.Index
                 modifier.Commit();
 
                 IndexReader reader = IndexReader.Open(dir, true);
-                Assert.AreEqual(1, reader.NumDocs());
+                Assert.AreEqual(1, reader.NumDocs);
 
                 int hitCount = GetHitCount(dir, new Term("id", System.Convert.ToString(id)));
                 Assert.AreEqual(1, hitCount);
@@ -306,7 +306,7 @@ namespace Lucene.Net.Index
             modifier.Commit();
 
             IndexReader reader = IndexReader.Open(dir, true);
-            Assert.AreEqual(5, reader.NumDocs());
+            Assert.AreEqual(5, reader.NumDocs);
             modifier.Close();
         }
 
@@ -330,7 +330,7 @@ namespace Lucene.Net.Index
             modifier.Commit();
 
             IndexReader reader = IndexReader.Open(dir, true);
-            Assert.AreEqual(7, reader.NumDocs());
+            Assert.AreEqual(7, reader.NumDocs);
             reader.Close();
 
             id = 0;
@@ -340,7 +340,7 @@ namespace Lucene.Net.Index
             modifier.Commit();
 
             reader = IndexReader.Open(dir, true);
-            Assert.AreEqual(5, reader.NumDocs());
+            Assert.AreEqual(5, reader.NumDocs);
             reader.Close();
 
             Term[] terms = new Term[3];
@@ -351,7 +351,7 @@ namespace Lucene.Net.Index
             modifier.DeleteDocuments(terms);
             modifier.Commit();
             reader = IndexReader.Open(dir, true);
-            Assert.AreEqual(2, reader.NumDocs());
+            Assert.AreEqual(2, reader.NumDocs);
             reader.Close();
 
             modifier.Close();
@@ -377,7 +377,7 @@ namespace Lucene.Net.Index
 		    modifier.Commit();
 
 		    IndexReader reader = IndexReader.Open(dir, true);
-		    Assert.AreEqual(7, reader.NumDocs());
+		    Assert.AreEqual(7, reader.NumDocs);
 		    reader.Close();
 
 		    // Add 1 doc (so we will have something buffered)
@@ -388,7 +388,7 @@ namespace Lucene.Net.Index
 
 		    // Delete all shouldn't be on disk yet
 		    reader = IndexReader.Open(dir, true);
-		    Assert.AreEqual(7, reader.NumDocs());
+		    Assert.AreEqual(7, reader.NumDocs);
 		    reader.Close();
 
 		    // Add a doc and update a doc (after the deleteAll, before the commit)
@@ -400,7 +400,7 @@ namespace Lucene.Net.Index
 
 		    // Validate there are no docs left
 		    reader = IndexReader.Open(dir, true);
-		    Assert.AreEqual(2, reader.NumDocs());
+		    Assert.AreEqual(2, reader.NumDocs);
 		    reader.Close();
 
 		    modifier.Close();
@@ -428,7 +428,7 @@ namespace Lucene.Net.Index
 			AddDoc(modifier, ++id, value_Renamed);
 
 		    IndexReader reader = IndexReader.Open(dir, true);
-			Assert.AreEqual(7, reader.NumDocs());
+			Assert.AreEqual(7, reader.NumDocs);
 			reader.Close();
 			
 			// Delete all
@@ -440,7 +440,7 @@ namespace Lucene.Net.Index
 			
 			// Validate that the docs are still there
 		    reader = IndexReader.Open(dir, true);
-			Assert.AreEqual(7, reader.NumDocs());
+			Assert.AreEqual(7, reader.NumDocs);
 			reader.Close();
 			
 			dir.Close();
@@ -466,7 +466,7 @@ namespace Lucene.Net.Index
 			modifier.Commit();
 			
 			IndexReader reader = modifier.GetReader();
-			Assert.AreEqual(7, reader.NumDocs());
+			Assert.AreEqual(7, reader.NumDocs);
 			reader.Close();
 			
 			AddDoc(modifier, ++id, value_Renamed);
@@ -476,7 +476,7 @@ namespace Lucene.Net.Index
 			modifier.DeleteAll();
 			
 			reader = modifier.GetReader();
-			Assert.AreEqual(0, reader.NumDocs());
+			Assert.AreEqual(0, reader.NumDocs);
 			reader.Close();
 			
 			
@@ -486,7 +486,7 @@ namespace Lucene.Net.Index
 			
 			// Validate that the docs are still there
 		    reader = IndexReader.Open(dir, true);
-			Assert.AreEqual(7, reader.NumDocs());
+			Assert.AreEqual(7, reader.NumDocs);
 			reader.Close();
 			
 			dir.Close();
@@ -768,7 +768,7 @@ namespace Lucene.Net.Index
 
             MockRAMDirectory dir = new MockRAMDirectory();
             IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
-            modifier.SetUseCompoundFile(true);
+            modifier.UseCompoundFile = true;
             modifier.SetMaxBufferedDeleteTerms(2);
 
             dir.FailOn(failure.Reset());

Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterExceptions.cs Mon Mar 12 22:29:26 2012
@@ -198,7 +198,7 @@ namespace Lucene.Net.Index
 			MockRAMDirectory dir = new MockRAMDirectory();
             random = new Random((int)(DateTime.Now.Ticks&0x7fffffff));
 			MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
+			((ConcurrentMergeScheduler) writer.MergeScheduler).SetSuppressExceptions();
 			//writer.setMaxBufferedDocs(10);
 			writer.SetRAMBufferSizeMB(0.1);
 			
@@ -247,7 +247,7 @@ namespace Lucene.Net.Index
             random = new Random((int)(DateTime.Now.Ticks & 0x7fffffff));
 			MockRAMDirectory dir = new MockRAMDirectory();
 			MockIndexWriter writer = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
+			((ConcurrentMergeScheduler) writer.MergeScheduler).SetSuppressExceptions();
 			//writer.setMaxBufferedDocs(10);
 			writer.SetRAMBufferSizeMB(0.2);
 			

Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMergePolicy.cs Mon Mar 12 22:29:26 2012
@@ -90,7 +90,7 @@ namespace Lucene.Net.Index
 			writer.SetMaxBufferedDocs(10);
 			writer.SetMergeFactor(10);
 			LogDocMergePolicy mp = new LogDocMergePolicy(writer);
-			mp.SetMinMergeDocs(100);
+			mp.MinMergeDocs = 100;
 			writer.SetMergePolicy(mp);
 			
 			for (int i = 0; i < 100; i++)
@@ -101,7 +101,7 @@ namespace Lucene.Net.Index
 				writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 				writer.SetMaxBufferedDocs(10);
 				writer.SetMergePolicy(mp);
-				mp.SetMinMergeDocs(100);
+				mp.MinMergeDocs = 100;
 				writer.SetMergeFactor(10);
 				CheckInvariants(writer);
 			}
@@ -183,7 +183,7 @@ namespace Lucene.Net.Index
 				AddDoc(writer);
 			}
 		    writer.Commit();
-		    ((ConcurrentMergeScheduler) writer.GetMergeScheduler()).Sync();
+		    ((ConcurrentMergeScheduler) writer.MergeScheduler).Sync();
 		    writer.Commit();
 			CheckInvariants(writer);
 			
@@ -223,7 +223,7 @@ namespace Lucene.Net.Index
 				AddDoc(writer);
 			}
 		    writer.Commit();
-            ((ConcurrentMergeScheduler)writer.GetMergeScheduler()).Sync();
+            ((ConcurrentMergeScheduler)writer.MergeScheduler).Sync();
 		    writer.Commit();
 			CheckInvariants(writer);
 			Assert.AreEqual(10, writer.MaxDoc());
@@ -243,7 +243,7 @@ namespace Lucene.Net.Index
             writer.WaitForMerges();
 			int maxBufferedDocs = writer.GetMaxBufferedDocs();
 			int mergeFactor = writer.GetMergeFactor();
-			int maxMergeDocs = writer.GetMaxMergeDocs();
+			int maxMergeDocs = writer.MaxMergeDocs;
 			
 			int ramSegmentCount = writer.GetNumBufferedDocuments();
 			Assert.IsTrue(ramSegmentCount < maxBufferedDocs);

Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterMerging.cs Mon Mar 12 22:29:26 2012
@@ -80,16 +80,16 @@ namespace Lucene.Net.Index
 			bool fail = false;
 			IndexReader reader = IndexReader.Open(directory, true);
 			
-			int max = reader.MaxDoc();
+			int max = reader.MaxDoc;
 			for (int i = 0; i < max; i++)
 			{
 				Document temp = reader.Document(i);
 				//System.out.println("doc "+i+"="+temp.getField("count").stringValue());
 				//compare the index doc number to the value that it should be
-				if (!temp.GetField("count").StringValue().Equals((i + startAt) + ""))
+				if (!temp.GetField("count").StringValue.Equals((i + startAt) + ""))
 				{
 					fail = true;
-					System.Console.Out.WriteLine("Document " + (i + startAt) + " is returning document " + temp.GetField("count").StringValue());
+					System.Console.Out.WriteLine("Document " + (i + startAt) + " is returning document " + temp.GetField("count").StringValue);
 				}
 			}
 			reader.Close();

Modified: incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIndexWriterReader.cs Mon Mar 12 22:29:26 2012
@@ -198,28 +198,28 @@ namespace Lucene.Net.Index
 			
 			// get a reader
 			IndexReader r1 = writer.GetReader();
-			Assert.IsTrue(r1.IsCurrent());
+			Assert.IsTrue(r1.IsCurrent);
 			
-			System.String id10 = r1.Document(10).GetField("id").StringValue();
+			System.String id10 = r1.Document(10).GetField("id").StringValue;
 			
 			Document newDoc = r1.Document(10);
 			newDoc.RemoveField("id");
 			newDoc.Add(new Field("id", System.Convert.ToString(8000), Field.Store.YES, Field.Index.NOT_ANALYZED));
 			writer.UpdateDocument(new Term("id", id10), newDoc);
-			Assert.IsFalse(r1.IsCurrent());
+			Assert.IsFalse(r1.IsCurrent);
 			
 			IndexReader r2 = writer.GetReader();
-			Assert.IsTrue(r2.IsCurrent());
+			Assert.IsTrue(r2.IsCurrent);
 			Assert.AreEqual(0, Count(new Term("id", id10), r2));
 			Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r2));
 			
 			r1.Close();
 			writer.Close();
-			Assert.IsTrue(r2.IsCurrent());
+			Assert.IsTrue(r2.IsCurrent);
 
 		    IndexReader r3 = IndexReader.Open(dir1, true);
-			Assert.IsTrue(r3.IsCurrent());
-			Assert.IsTrue(r2.IsCurrent());
+			Assert.IsTrue(r3.IsCurrent);
+			Assert.IsTrue(r2.IsCurrent);
 			Assert.AreEqual(0, Count(new Term("id", id10), r3));
 			Assert.AreEqual(1, Count(new Term("id", System.Convert.ToString(8000)), r3));
 			
@@ -227,13 +227,13 @@ namespace Lucene.Net.Index
 			Document doc = new Document();
 			doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
-			Assert.IsTrue(r2.IsCurrent());
-			Assert.IsTrue(r3.IsCurrent());
+			Assert.IsTrue(r2.IsCurrent);
+			Assert.IsTrue(r3.IsCurrent);
 			
 			writer.Close();
 			
-			Assert.IsFalse(r2.IsCurrent());
-			Assert.IsTrue(!r3.IsCurrent());
+			Assert.IsFalse(r2.IsCurrent);
+			Assert.IsTrue(!r3.IsCurrent);
 			
 			r2.Close();
 			r3.Close();
@@ -265,18 +265,18 @@ namespace Lucene.Net.Index
 			writer2.Close();
 			
 			IndexReader r0 = writer.GetReader();
-			Assert.IsTrue(r0.IsCurrent());
+			Assert.IsTrue(r0.IsCurrent);
 			writer.AddIndexesNoOptimize(new Directory[]{dir2});
-			Assert.IsFalse(r0.IsCurrent());
+			Assert.IsFalse(r0.IsCurrent);
 			r0.Close();
 			
 			IndexReader r1 = writer.GetReader();
-			Assert.IsTrue(r1.IsCurrent());
+			Assert.IsTrue(r1.IsCurrent);
 			
 			writer.Commit();
-			Assert.IsFalse(r1.IsCurrent());
+			Assert.IsFalse(r1.IsCurrent);
 			
-			Assert.AreEqual(200, r1.MaxDoc());
+			Assert.AreEqual(200, r1.MaxDoc);
 			
 			int index2df = r1.DocFreq(new Term("indexname", "index2"));
 			
@@ -315,7 +315,7 @@ namespace Lucene.Net.Index
 			writer.AddIndexesNoOptimize(new Directory[]{dir2});
 			
 			IndexReader r1 = writer.GetReader();
-			Assert.AreEqual(500, r1.MaxDoc());
+			Assert.AreEqual(500, r1.MaxDoc);
 			
 			r1.Close();
 			writer.Close();
@@ -333,7 +333,7 @@ namespace Lucene.Net.Index
 			
 			Directory dir1 = new MockRAMDirectory();
 			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-            writer.SetReaderTermsIndexDivisor(2);
+            writer.ReaderTermsIndexDivisor = 2;
 			writer.SetInfoStream(infoStream);
 			// create the index
 			CreateIndexNoClose(!optimize, "index1", writer);
@@ -341,7 +341,7 @@ namespace Lucene.Net.Index
 			// get a reader
 			IndexReader r1 = writer.GetReader();
 			
-			System.String id10 = r1.Document(10).GetField("id").StringValue();
+			System.String id10 = r1.Document(10).GetField("id").StringValue;
 			
 			// deleted IW docs should not show up in the next getReader
 			writer.DeleteDocuments(new Term("id", id10));
@@ -349,7 +349,7 @@ namespace Lucene.Net.Index
 			Assert.AreEqual(1, Count(new Term("id", id10), r1));
 			Assert.AreEqual(0, Count(new Term("id", id10), r2));
 			
-			System.String id50 = r1.Document(50).GetField("id").StringValue();
+			System.String id50 = r1.Document(50).GetField("id").StringValue;
 			Assert.AreEqual(1, Count(new Term("id", id50), r1));
 			
 			writer.DeleteDocuments(new Term("id", id50));
@@ -358,7 +358,7 @@ namespace Lucene.Net.Index
 			Assert.AreEqual(0, Count(new Term("id", id10), r3));
 			Assert.AreEqual(0, Count(new Term("id", id50), r3));
 			
-			System.String id75 = r1.Document(75).GetField("id").StringValue();
+			System.String id75 = r1.Document(75).GetField("id").StringValue;
 			writer.DeleteDocuments(new TermQuery(new Term("id", id75)));
 			IndexReader r4 = writer.GetReader();
 			Assert.AreEqual(1, Count(new Term("id", id75), r3));
@@ -404,7 +404,7 @@ namespace Lucene.Net.Index
 			_TestUtil.CheckIndex(mainDir);
 
 		    IndexReader reader = IndexReader.Open(mainDir, true);
-			Assert.AreEqual(addDirThreads.count.IntValue(), reader.NumDocs());
+			Assert.AreEqual(addDirThreads.count.IntValue(), reader.NumDocs);
 			//Assert.AreEqual(100 + numDirs * (3 * numIter / 4) * addDirThreads.NUM_THREADS
 			//    * addDirThreads.NUM_INIT_DOCS, reader.numDocs());
 			reader.Close();
@@ -478,7 +478,7 @@ namespace Lucene.Net.Index
 				InitBlock(enclosingInstance);
 				this.mainWriter = mainWriter;
 				IndexReader reader = mainWriter.GetReader();
-				int maxDoc = reader.MaxDoc();
+				int maxDoc = reader.MaxDoc;
 				random = Enclosing_Instance.NewRandom();
 				int iter = random.Next(maxDoc);
 				for (int x = 0; x < iter; x++)
@@ -722,15 +722,15 @@ namespace Lucene.Net.Index
 			IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetInfoStream(infoStream);
 			IndexReader r1 = writer.GetReader();
-			Assert.AreEqual(0, r1.MaxDoc());
+			Assert.AreEqual(0, r1.MaxDoc);
 			CreateIndexNoClose(false, "index1", writer);
 			writer.Flush(!optimize, true, true);
 			
 			IndexReader iwr1 = writer.GetReader();
-			Assert.AreEqual(100, iwr1.MaxDoc());
+			Assert.AreEqual(100, iwr1.MaxDoc);
 			
 			IndexReader r2 = writer.GetReader();
-			Assert.AreEqual(r2.MaxDoc(), 100);
+			Assert.AreEqual(r2.MaxDoc, 100);
 			// add 100 documents
 			for (int x = 10000; x < 10000 + 100; x++)
 			{
@@ -741,11 +741,11 @@ namespace Lucene.Net.Index
 			// verify the reader was reopened internally
 			IndexReader iwr2 = writer.GetReader();
 			Assert.IsTrue(iwr2 != r1);
-			Assert.AreEqual(200, iwr2.MaxDoc());
+			Assert.AreEqual(200, iwr2.MaxDoc);
 			// should have flushed out a segment
 			IndexReader r3 = writer.GetReader();
 			Assert.IsTrue(r2 != r3);
-			Assert.AreEqual(200, r3.MaxDoc());
+			Assert.AreEqual(200, r3.MaxDoc);
 			
 			// dec ref the readers rather than close them because
 			// closing flushes changes to the writer
@@ -760,7 +760,7 @@ namespace Lucene.Net.Index
 			writer = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			IndexReader w2r1 = writer.GetReader();
 			// insure the deletes were actually flushed to the directory
-			Assert.AreEqual(200, w2r1.MaxDoc());
+			Assert.AreEqual(200, w2r1.MaxDoc);
 			w2r1.Close();
 			writer.Close();
 			
@@ -859,7 +859,7 @@ namespace Lucene.Net.Index
 			{
 				writer.AddDocument(CreateDocument(i, "test", 4));
 			}
-			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).Sync();
+			((ConcurrentMergeScheduler) writer.MergeScheduler).Sync();
 			
 			Assert.IsTrue(warmer.warmCount > 0);
 			int count = warmer.warmCount;
@@ -888,13 +888,13 @@ namespace Lucene.Net.Index
 			_TestUtil.CheckIndex(dir1);
 			writer.Commit();
 			_TestUtil.CheckIndex(dir1);
-			Assert.AreEqual(100, r1.NumDocs());
+			Assert.AreEqual(100, r1.NumDocs);
 			
 			for (int i = 0; i < 10; i++)
 			{
 				writer.AddDocument(CreateDocument(i, "test", 4));
 			}
-			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).Sync();
+			((ConcurrentMergeScheduler) writer.MergeScheduler).Sync();
 			
 			IndexReader r2 = r1.Reopen();
 			if (r2 != r1)
@@ -902,7 +902,7 @@ namespace Lucene.Net.Index
 				r1.Close();
 				r1 = r2;
 			}
-			Assert.AreEqual(110, r1.NumDocs());
+			Assert.AreEqual(110, r1.NumDocs);
 			writer.Close();
 			r1.Close();
 			dir1.Close();
@@ -925,7 +925,7 @@ namespace Lucene.Net.Index
 			_TestUtil.CheckIndex(dir1);
 			
 			// reader should remain usable even after IndexWriter is closed:
-			Assert.AreEqual(100, r.NumDocs());
+			Assert.AreEqual(100, r.NumDocs);
 			Query q = new TermQuery(new Term("indexname", "test"));
 			Assert.AreEqual(100, new IndexSearcher(r).Search(q, 10).TotalHits);
 			
@@ -1159,8 +1159,8 @@ namespace Lucene.Net.Index
 			w.Close();
 			r.Close();
 			r = IndexReader.Open(dir, true);
-			Assert.AreEqual(1, r.NumDocs());
-			Assert.IsFalse(r.HasDeletions());
+			Assert.AreEqual(1, r.NumDocs);
+			Assert.IsFalse(r.HasDeletions);
 			r.Close();
 			dir.Close();
 		}
@@ -1180,17 +1180,17 @@ namespace Lucene.Net.Index
             id.SetValue("1");
             w.AddDocument(doc);
             IndexReader r = w.GetReader();
-            Assert.AreEqual(2, r.NumDocs());
+            Assert.AreEqual(2, r.NumDocs);
             r.Close();
 
             w.DeleteDocuments(new Term("id", "0"));
             r = w.GetReader();
-            Assert.AreEqual(1, r.NumDocs());
+            Assert.AreEqual(1, r.NumDocs);
             r.Close();
 
             w.DeleteDocuments(new Term("id", "1"));
             r = w.GetReader();
-            Assert.AreEqual(0, r.NumDocs());
+            Assert.AreEqual(0, r.NumDocs);
             r.Close();
 
             w.Close();

Modified: incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestIsCurrent.cs Mon Mar 12 22:29:26 2012
@@ -75,7 +75,7 @@ namespace Lucene.Net.Index
 
             // assert index has a document and reader is up2date 
             Assert.AreEqual(1, writer.NumDocs(), "One document should be in the index");
-            Assert.IsTrue(reader.IsCurrent(), "Document added, reader should be stale ");
+            Assert.IsTrue(reader.IsCurrent, "Document added, reader should be stale ");
 
             // remove document
             Term idTerm = new Term("UUID", "1");
@@ -84,7 +84,7 @@ namespace Lucene.Net.Index
 
             // assert document has been deleted (index changed), reader is stale
             Assert.AreEqual(0, writer.NumDocs(), "Document should be removed");
-            Assert.IsFalse(reader.IsCurrent(), "Reader should be stale");
+            Assert.IsFalse(reader.IsCurrent, "Reader should be stale");
 
             reader.Close();
         }
@@ -103,7 +103,7 @@ namespace Lucene.Net.Index
 
             // assert index has a document and reader is up2date 
             Assert.AreEqual(1, writer.NumDocs(), "One document should be in the index");
-            Assert.IsTrue(reader.IsCurrent(), "Document added, reader should be stale ");
+            Assert.IsTrue(reader.IsCurrent, "Document added, reader should be stale ");
 
             // remove all documents
             writer.DeleteAll();
@@ -111,7 +111,7 @@ namespace Lucene.Net.Index
 
             // assert document has been deleted (index changed), reader is stale
             Assert.AreEqual(0, writer.NumDocs(), "Document should be removed");
-            Assert.IsFalse(reader.IsCurrent(), "Reader should be stale");
+            Assert.IsFalse(reader.IsCurrent, "Reader should be stale");
 
             reader.Close();
         }

Modified: incubator/lucene.net/trunk/test/core/Index/TestLazyBug.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestLazyBug.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestLazyBug.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestLazyBug.cs Mon Mar 12 22:29:26 2012
@@ -69,7 +69,7 @@ namespace Lucene.Net.Index
 				Analyzer analyzer = new SimpleAnalyzer();
 				IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 				
-				writer.SetUseCompoundFile(false);
+				writer.UseCompoundFile = false;
 				
 				for (int d = 1; d <= NUM_DOCS; d++)
 				{
@@ -105,12 +105,12 @@ namespace Lucene.Net.Index
 				var fields = d.GetFields();
 				for (System.Collections.IEnumerator fi = fields.GetEnumerator(); fi.MoveNext(); )
 				{
-					Fieldable f = null;
+					IFieldable f = null;
 					try
 					{
-						f = (Fieldable) fi.Current;
-						System.String fname = f.Name();
-						System.String fval = f.StringValue();
+						f = (IFieldable) fi.Current;
+						System.String fname = f.Name;
+						System.String fval = f.StringValue;
 						Assert.IsNotNull(docs[i] + " FIELD: " + fname, fval);
 						System.String[] vals = fval.Split('#');
 						if (!dataset.Contains(vals[0]) || !dataset.Contains(vals[1]))
@@ -120,7 +120,7 @@ namespace Lucene.Net.Index
 					}
 					catch (System.Exception e)
 					{
-						throw new Exception(docs[i] + " WTF: " + f.Name(), e);
+						throw new Exception(docs[i] + " WTF: " + f.Name, e);
 					}
 				}
 			}

Modified: incubator/lucene.net/trunk/test/core/Index/TestLazyProxSkipping.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestLazyProxSkipping.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestLazyProxSkipping.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestLazyProxSkipping.cs Mon Mar 12 22:29:26 2012
@@ -86,7 +86,7 @@ namespace Lucene.Net.Index
 			
 			Directory directory = new SeekCountingDirectory(this);
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			writer.SetUseCompoundFile(false);
+			writer.UseCompoundFile = false;
 			writer.SetMaxBufferedDocs(10);
 			for (int i = 0; i < numDocs; i++)
 			{
@@ -230,13 +230,13 @@ namespace Lucene.Net.Index
                 }
                 isDisposed = true;
             }
-			
-			public override long GetFilePointer()
-			{
-				return this.input.GetFilePointer();
-			}
-			
-			public override void  Seek(long pos)
+
+		    public override long FilePointer
+		    {
+		        get { return this.input.FilePointer; }
+		    }
+
+		    public override void  Seek(long pos)
 			{
 				Enclosing_Instance.seeksCounter++;
 				this.input.Seek(pos);

Modified: incubator/lucene.net/trunk/test/core/Index/TestMultiLevelSkipList.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestMultiLevelSkipList.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestMultiLevelSkipList.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestMultiLevelSkipList.cs Mon Mar 12 22:29:26 2012
@@ -120,7 +120,7 @@ namespace Lucene.Net.Index
 				bool hasNext = input.IncrementToken();
 				if (hasNext)
 				{
-					payloadAtt.SetPayload(new Payload(new byte[]{(byte) count++}));
+					payloadAtt.Payload = new Payload(new byte[]{(byte) count++});
 				}
 				return hasNext;
 			}
@@ -176,13 +176,13 @@ namespace Lucene.Net.Index
                 }
                 isDisposed = true;
             }
-			
-			public override long GetFilePointer()
-			{
-				return this.input.GetFilePointer();
-			}
-			
-			public override void  Seek(long pos)
+
+		    public override long FilePointer
+		    {
+		        get { return this.input.FilePointer; }
+		    }
+
+		    public override void  Seek(long pos)
 			{
 				this.input.Seek(pos);
 			}

Modified: incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestNRTReaderWithThreads.cs Mon Mar 12 22:29:26 2012
@@ -40,7 +40,7 @@ namespace Lucene.Net.Index
 		{
 			Directory mainDir = new MockRAMDirectory();
 			IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-			writer.SetUseCompoundFile(false);
+			writer.UseCompoundFile = false;
 			IndexReader reader = writer.GetReader(); // start pooling readers
 			reader.Close();
 			writer.SetMergeFactor(2);

Modified: incubator/lucene.net/trunk/test/core/Index/TestNorms.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestNorms.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestNorms.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestNorms.cs Mon Mar 12 22:29:26 2012
@@ -187,14 +187,14 @@ namespace Lucene.Net.Index
 			iw.SetMaxBufferedDocs(5);
 			iw.SetMergeFactor(3);
 			iw.SetSimilarity(similarityOne);
-			iw.SetUseCompoundFile(true);
+			iw.UseCompoundFile = true;
 			iw.Close();
 		}
 		
 		private void  ModifyNormsForF1(Directory dir)
 		{
 			IndexReader ir = IndexReader.Open(dir, false);
-			int n = ir.MaxDoc();
+			int n = ir.MaxDoc;
 			for (int i = 0; i < n; i += 3)
 			{
 				// modify for every third doc
@@ -236,7 +236,7 @@ namespace Lucene.Net.Index
 			iw.SetMaxBufferedDocs(5);
 			iw.SetMergeFactor(3);
 			iw.SetSimilarity(similarityOne);
-			iw.SetUseCompoundFile(compound);
+			iw.UseCompoundFile = compound;
 			for (int i = 0; i < ndocs; i++)
 			{
 				iw.AddDocument(NewDoc());
@@ -252,7 +252,7 @@ namespace Lucene.Net.Index
 			for (int i = 0; i < 10; i++)
 			{
 				Field f = new Field("f" + i, "v" + i, Field.Store.NO, Field.Index.NOT_ANALYZED);
-				f.SetBoost(boost);
+				f.Boost = boost;
 				d.Add(f);
 			}
 			return d;

Modified: incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestOmitTf.cs Mon Mar 12 22:29:26 2012
@@ -203,9 +203,9 @@ namespace Lucene.Net.Index
 		
         private class AnonymousIDFExplanation : Explanation.IDFExplanation
         {
-            public override float GetIdf()
+            public override float Idf
             {
-                return 1.0f;
+                get { return 1.0f; }
             }
 
             public override string Explain()
@@ -265,7 +265,7 @@ namespace Lucene.Net.Index
 			
 			// this field will NOT have Tf
 			Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
-			f2.SetOmitTermFreqAndPositions(true);
+			f2.OmitTermFreqAndPositions = true;
 			d.Add(f2);
 			
 			writer.AddDocument(d);
@@ -275,10 +275,10 @@ namespace Lucene.Net.Index
 			d = new Document();
 			
 			// Reverese
-			f1.SetOmitTermFreqAndPositions(true);
+			f1.OmitTermFreqAndPositions = true;
 			d.Add(f1);
 			
-			f2.SetOmitTermFreqAndPositions(false);
+			f2.OmitTermFreqAndPositions = false;
 			d.Add(f2);
 			
 			writer.AddDocument(d);
@@ -315,7 +315,7 @@ namespace Lucene.Net.Index
 			
 			// this field will NOT have Tf
 			Field f2 = new Field("f2", "This field has NO Tf in all docs", Field.Store.NO, Field.Index.ANALYZED);
-			f2.SetOmitTermFreqAndPositions(true);
+			f2.OmitTermFreqAndPositions = true;
 			d.Add(f2);
 			
 			for (int i = 0; i < 30; i++)
@@ -326,10 +326,10 @@ namespace Lucene.Net.Index
 			d = new Document();
 			
 			// Reverse
-			f1.SetOmitTermFreqAndPositions(true);
+			f1.OmitTermFreqAndPositions = true;
 			d.Add(f1);
 			
-			f2.SetOmitTermFreqAndPositions(false);
+			f2.OmitTermFreqAndPositions = false;
 			d.Add(f2);
 			
 			for (int i = 0; i < 30; i++)
@@ -375,7 +375,7 @@ namespace Lucene.Net.Index
 			for (int i = 0; i < 5; i++)
 				writer.AddDocument(d);
 			
-			f2.SetOmitTermFreqAndPositions(true);
+			f2.OmitTermFreqAndPositions = true;
 			
 			for (int i = 0; i < 20; i++)
 				writer.AddDocument(d);
@@ -413,11 +413,11 @@ namespace Lucene.Net.Index
 			IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMaxBufferedDocs(3);
 			writer.SetMergeFactor(2);
-			writer.SetUseCompoundFile(false);
+			writer.UseCompoundFile = false;
 			Document d = new Document();
 			
 			Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
-			f1.SetOmitTermFreqAndPositions(true);
+			f1.OmitTermFreqAndPositions = true;
 			d.Add(f1);
 			
 			for (int i = 0; i < 30; i++)
@@ -457,7 +457,7 @@ namespace Lucene.Net.Index
 				sb.Append(term).Append(" ");
 				System.String content = sb.ToString();
 				Field noTf = new Field("noTf", content + (i % 2 == 0?"":" notf"), Field.Store.NO, Field.Index.ANALYZED);
-				noTf.SetOmitTermFreqAndPositions(true);
+				noTf.OmitTermFreqAndPositions = true;
 				d.Add(noTf);
 				
 				Field tf = new Field("tf", content + (i % 2 == 0?" tf":""), Field.Store.NO, Field.Index.ANALYZED);
@@ -476,7 +476,7 @@ namespace Lucene.Net.Index
 			* Verify the index
 			*/
 			Searcher searcher = new IndexSearcher(dir, true);
-			searcher.SetSimilarity(new SimpleSimilarity());
+			searcher.Similarity = new SimpleSimilarity();
 			
 			Term a = new Term("noTf", term);
 			Term b = new Term("tf", term);

Modified: incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs Mon Mar 12 22:29:26 2012
@@ -146,21 +146,21 @@ namespace Lucene.Net.Index
 			pr.Add(IndexReader.Open(dir1, false));
             pr.Add(IndexReader.Open(dir2, false));
 			
-			Assert.IsTrue(pr.IsCurrent());
+			Assert.IsTrue(pr.IsCurrent);
             IndexReader modifier = IndexReader.Open(dir1, false);
 			modifier.SetNorm(0, "f1", 100);
 			modifier.Close();
 			
 			// one of the two IndexReaders which ParallelReader is using
 			// is not current anymore
-			Assert.IsFalse(pr.IsCurrent());
+			Assert.IsFalse(pr.IsCurrent);
 
             modifier = IndexReader.Open(dir2, false);
 			modifier.SetNorm(0, "f3", 100);
 			modifier.Close();
 			
 			// now both are not current anymore
-			Assert.IsFalse(pr.IsCurrent());
+			Assert.IsFalse(pr.IsCurrent);
 		}
 		
 		[Test]
@@ -186,7 +186,7 @@ namespace Lucene.Net.Index
 			ParallelReader pr = new ParallelReader();
             pr.Add(IndexReader.Open(dir1, false));
             pr.Add(IndexReader.Open(dir2, false));
-			Assert.IsFalse(pr.IsOptimized());
+			Assert.IsFalse(pr.IsOptimized);
 			pr.Close();
 			
 			modifier = new IndexWriter(dir1, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
@@ -197,7 +197,7 @@ namespace Lucene.Net.Index
             pr.Add(IndexReader.Open(dir1, false));
             pr.Add(IndexReader.Open(dir2, false));
 			// just one of the two indexes are optimized
-			Assert.IsFalse(pr.IsOptimized());
+			Assert.IsFalse(pr.IsOptimized);
 			pr.Close();
 			
 			
@@ -209,7 +209,7 @@ namespace Lucene.Net.Index
             pr.Add(IndexReader.Open(dir1, false));
             pr.Add(IndexReader.Open(dir2, false));
 			// now both indexes are optimized
-			Assert.IsTrue(pr.IsOptimized());
+			Assert.IsTrue(pr.IsOptimized);
 			pr.Close();
 		}
 		
@@ -243,9 +243,9 @@ namespace Lucene.Net.Index
 			Assert.AreEqual(parallelHits.Length, singleHits.Length);
 			for (int i = 0; i < parallelHits.Length; i++)
 			{
-				Assert.AreEqual(parallelHits[i].score, singleHits[i].score, 0.001f);
-				Document docParallel = parallel.Doc(parallelHits[i].doc);
-				Document docSingle = single.Doc(singleHits[i].doc);
+				Assert.AreEqual(parallelHits[i].Score, singleHits[i].Score, 0.001f);
+				Document docParallel = parallel.Doc(parallelHits[i].Doc);
+				Document docSingle = single.Doc(singleHits[i].Doc);
 				Assert.AreEqual(docParallel.Get("f1"), docSingle.Get("f1"));
 				Assert.AreEqual(docParallel.Get("f2"), docSingle.Get("f2"));
 				Assert.AreEqual(docParallel.Get("f3"), docSingle.Get("f3"));

Modified: incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs Mon Mar 12 22:29:26 2012
@@ -95,7 +95,7 @@ namespace Lucene.Net.Index
 			rnd = NewRandom();
 			byte[] testData = System.Text.UTF8Encoding.UTF8.GetBytes("This is a test!");
 			Payload payload = new Payload(testData);
-			Assert.AreEqual(testData.Length, payload.Length(), "Wrong payload length.");
+			Assert.AreEqual(testData.Length, payload.Length, "Wrong payload length.");
 			
 			// test copyTo()
 			byte[] target = new byte[testData.Length - 1];
@@ -139,8 +139,8 @@ namespace Lucene.Net.Index
 			}
 			
 			Payload clone = (Payload) payload.Clone();
-			Assert.AreEqual(payload.Length(), clone.Length());
-			for (int i = 0; i < payload.Length(); i++)
+			Assert.AreEqual(payload.Length, clone.Length);
+			for (int i = 0; i < payload.Length; i++)
 			{
 				Assert.AreEqual(payload.ByteAt(i), clone.ByteAt(i));
 			}
@@ -304,7 +304,7 @@ namespace Lucene.Net.Index
 					{
 						tps[j].NextPosition();
 						tps[j].GetPayload(verifyPayloadData, offset);
-						offset += tps[j].GetPayloadLength();
+						offset += tps[j].PayloadLength;
 					}
 				}
 			}
@@ -324,7 +324,7 @@ namespace Lucene.Net.Index
 			tp.NextPosition();
 			// now we don't read this payload
 			tp.NextPosition();
-			Assert.AreEqual(1, tp.GetPayloadLength(), "Wrong payload length.");
+			Assert.AreEqual(1, tp.PayloadLength, "Wrong payload length.");
 			byte[] payload = tp.GetPayload(null, 0);
 			Assert.AreEqual(payload[0], payloadData[numTerms]);
 			tp.NextPosition();
@@ -332,7 +332,7 @@ namespace Lucene.Net.Index
 			// we don't read this payload and skip to a different document
 			tp.SkipTo(5);
 			tp.NextPosition();
-			Assert.AreEqual(1, tp.GetPayloadLength(), "Wrong payload length.");
+			Assert.AreEqual(1, tp.PayloadLength, "Wrong payload length.");
 			payload = tp.GetPayload(null, 0);
 			Assert.AreEqual(payload[0], payloadData[5 * numTerms]);
 			
@@ -343,16 +343,16 @@ namespace Lucene.Net.Index
 			tp.Seek(terms[1]);
 			tp.Next();
 			tp.NextPosition();
-			Assert.AreEqual(1, tp.GetPayloadLength(), "Wrong payload length.");
+			Assert.AreEqual(1, tp.PayloadLength, "Wrong payload length.");
 			tp.SkipTo(skipInterval - 1);
 			tp.NextPosition();
-			Assert.AreEqual(1, tp.GetPayloadLength(), "Wrong payload length.");
+			Assert.AreEqual(1, tp.PayloadLength, "Wrong payload length.");
 			tp.SkipTo(2 * skipInterval - 1);
 			tp.NextPosition();
-			Assert.AreEqual(1, tp.GetPayloadLength(), "Wrong payload length.");
+			Assert.AreEqual(1, tp.PayloadLength, "Wrong payload length.");
 			tp.SkipTo(3 * skipInterval - 1);
 			tp.NextPosition();
-			Assert.AreEqual(3 * skipInterval - 2 * numDocs - 1, tp.GetPayloadLength(), "Wrong payload length.");
+			Assert.AreEqual(3 * skipInterval - 2 * numDocs - 1, tp.PayloadLength, "Wrong payload length.");
 			
 			/*
 			* Test multiple call of getPayload()
@@ -394,7 +394,7 @@ namespace Lucene.Net.Index
 			tp.Next();
 			tp.NextPosition();
 			
-			verifyPayloadData = new byte[tp.GetPayloadLength()];
+			verifyPayloadData = new byte[tp.PayloadLength];
 			tp.GetPayload(verifyPayloadData, 0);
 			byte[] portion = new byte[1500];
 			Array.Copy(payloadData, 100, portion, 0, 1500);
@@ -534,14 +534,14 @@ namespace Lucene.Net.Index
 						if (p == null)
 						{
 							p = new Payload();
-							payloadAtt.SetPayload(p);
+							payloadAtt.Payload = p;
 						}
 						p.SetData(data, offset, length);
 						offset += length;
 					}
 					else
 					{
-						payloadAtt.SetPayload(null);
+						payloadAtt.Payload = null;
 					}
 				}
 				
@@ -637,7 +637,7 @@ namespace Lucene.Net.Index
 				first = false;
                 ClearAttributes();
 				termAtt.SetTermBuffer(term);
-				payloadAtt.SetPayload(new Payload(payload));
+				payloadAtt.Payload = new Payload(payload);
 				return true;
 			}
 

Modified: incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs Mon Mar 12 22:29:26 2012
@@ -81,7 +81,7 @@ namespace Lucene.Net.Index
 				System.String token = tokens[i];
 				mapper.Map(token, 1, null, thePositions[i]);
 			}
-			var map = mapper.GetFieldToTerms();
+			var map = mapper.FieldToTerms;
 			Assert.IsTrue(map != null, "map is null and it shouldn't be");
 			Assert.IsTrue(map.Count == 1, "map Size: " + map.Count + " is not: " + 1);
 			var positions = map["test"];

Modified: incubator/lucene.net/trunk/test/core/Index/TestRollback.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestRollback.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestRollback.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestRollback.cs Mon Mar 12 22:29:26 2012
@@ -61,7 +61,7 @@ namespace Lucene.Net.Index
             w.Rollback();
 
             IndexReader r = IndexReader.Open(dir, true);
-            Assert.AreEqual(5, r.NumDocs(), "index should contain same number of docs post rollback");
+            Assert.AreEqual(5, r.NumDocs, "index should contain same number of docs post rollback");
             r.Close();
             dir.Close();
         }

Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs Mon Mar 12 22:29:26 2012
@@ -95,7 +95,7 @@ namespace Lucene.Net.Index
 			//Should be able to open a new SegmentReader against the new directory
             SegmentReader mergedReader = SegmentReader.Get(true, new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			Assert.IsTrue(mergedReader != null);
-			Assert.IsTrue(mergedReader.NumDocs() == 2);
+			Assert.IsTrue(mergedReader.NumDocs == 2);
 			Document newDoc1 = mergedReader.Document(0);
 			Assert.IsTrue(newDoc1 != null);
 			//There are 2 unstored fields on the document

Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs Mon Mar 12 22:29:26 2012
@@ -16,11 +16,10 @@
  */
 
 using System;
-
+using Lucene.Net.Documents;
 using NUnit.Framework;
 
 using Document = Lucene.Net.Documents.Document;
-using Fieldable = Lucene.Net.Documents.Fieldable;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using DefaultSimilarity = Lucene.Net.Search.DefaultSimilarity;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
@@ -72,8 +71,8 @@ namespace Lucene.Net.Index
 		[Test]
 		public virtual void  TestDocument()
 		{
-			Assert.IsTrue(reader.NumDocs() == 1);
-			Assert.IsTrue(reader.MaxDoc() >= 1);
+			Assert.IsTrue(reader.NumDocs == 1);
+			Assert.IsTrue(reader.MaxDoc >= 1);
 			Document result = reader.Document(0);
 			Assert.IsTrue(result != null);
 			//There are 2 unstored fields on the document that are not preserved across writing
@@ -83,7 +82,7 @@ namespace Lucene.Net.Index
             foreach (var field in fields)
 			{
 				Assert.IsTrue(field != null);
-				Assert.IsTrue(DocHelper.nameValues.Contains(field.Name()));
+				Assert.IsTrue(DocHelper.nameValues.Contains(field.Name));
 			}
 		}
 		
@@ -95,11 +94,11 @@ namespace Lucene.Net.Index
 			SegmentInfo info = DocHelper.WriteDoc(dir, docToDelete);
             SegmentReader deleteReader = SegmentReader.Get(false, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			Assert.IsTrue(deleteReader != null);
-			Assert.IsTrue(deleteReader.NumDocs() == 1);
+			Assert.IsTrue(deleteReader.NumDocs == 1);
 			deleteReader.DeleteDocument(0);
 			Assert.IsTrue(deleteReader.IsDeleted(0) == true);
-			Assert.IsTrue(deleteReader.HasDeletions() == true);
-			Assert.IsTrue(deleteReader.NumDocs() == 0);
+			Assert.IsTrue(deleteReader.HasDeletions == true);
+			Assert.IsTrue(deleteReader.NumDocs == 0);
 		}
 		
 		[Test]
@@ -188,20 +187,20 @@ namespace Lucene.Net.Index
 			// test omit norms
 			for (int i = 0; i < DocHelper.fields.Length; i++)
 			{
-				Fieldable f = DocHelper.fields[i];
-				if (f.IsIndexed())
+				IFieldable f = DocHelper.fields[i];
+				if (f.IsIndexed)
 				{
-					Assert.AreEqual(reader.HasNorms(f.Name()), !f.GetOmitNorms());
-					Assert.AreEqual(reader.HasNorms(f.Name()), !DocHelper.noNorms.Contains(f.Name()));
-					if (!reader.HasNorms(f.Name()))
+					Assert.AreEqual(reader.HasNorms(f.Name), !f.OmitNorms);
+					Assert.AreEqual(reader.HasNorms(f.Name), !DocHelper.noNorms.Contains(f.Name));
+					if (!reader.HasNorms(f.Name))
 					{
 						// test for fake norms of 1.0 or null depending on the flag
-						byte[] norms = reader.Norms(f.Name());
+						byte[] norms = reader.Norms(f.Name);
 						byte norm1 = DefaultSimilarity.EncodeNorm(1.0f);
 						Assert.IsNull(norms);
-						norms = new byte[reader.MaxDoc()];
-						reader.Norms(f.Name(), norms, 0);
-						for (int j = 0; j < reader.MaxDoc(); j++)
+						norms = new byte[reader.MaxDoc];
+						reader.Norms(f.Name, norms, 0);
+						for (int j = 0; j < reader.MaxDoc; j++)
 						{
 							Assert.AreEqual(norms[j], norm1);
 						}

Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentTermDocs.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentTermDocs.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentTermDocs.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentTermDocs.cs Mon Mar 12 22:29:26 2012
@@ -77,7 +77,7 @@ namespace Lucene.Net.Index
 			//After adding the document, we should be able to read it back in
 			SegmentReader reader = SegmentReader.Get(true, info, indexDivisor);
 			Assert.IsTrue(reader != null);
-			Assert.AreEqual(indexDivisor, reader.GetTermInfosIndexDivisor());
+			Assert.AreEqual(indexDivisor, reader.TermInfosIndexDivisor);
 			SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
 			Assert.IsTrue(segTermDocs != null);
 			segTermDocs.Seek(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));

Modified: incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs Mon Mar 12 22:29:26 2012
@@ -253,7 +253,7 @@ namespace Lucene.Net.Index
 			// While we hold the snapshot, and nomatter how long
 			// we take to do the backup, the IndexWriter will
 			// never delete the files in the snapshot:
-			System.Collections.Generic.ICollection<string> files = cp.GetFileNames();
+			System.Collections.Generic.ICollection<string> files = cp.FileNames;
             foreach (string fileName in files)
 			{
 				// NOTE: in a real backup you would not use

Modified: incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs Mon Mar 12 22:29:26 2012
@@ -39,7 +39,7 @@ namespace Lucene.Net.Index
 		{
 			public virtual int Compare(System.Object o1, System.Object o2)
 			{
-				return String.CompareOrdinal(((Fieldable) o1).Name(), ((Fieldable) o2).Name());
+				return String.CompareOrdinal(((IFieldable) o1).Name, ((IFieldable) o2).Name);
 			}
 		}
 		internal static int maxFields = 4;
@@ -162,7 +162,7 @@ namespace Lucene.Net.Index
 		{
 			System.Collections.Hashtable docs = new System.Collections.Hashtable();
 			IndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
-			w.SetUseCompoundFile(false);
+			w.UseCompoundFile = false;
 			
 			/***
 			w.setMaxMergeDocs(Integer.MAX_VALUE);
@@ -221,7 +221,7 @@ namespace Lucene.Net.Index
 			for (int iter = 0; iter < 3; iter++)
 			{
 				IndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
-				w.SetUseCompoundFile(false);
+				w.UseCompoundFile = false;
 				
 				// force many merges
 				w.SetMergeFactor(mergeFactor);
@@ -280,17 +280,17 @@ namespace Lucene.Net.Index
 			while (iter.MoveNext())
 			{
 				Document d = (Document) iter.Current;
-                var fields = new List<Fieldable>();
+                var fields = new List<IFieldable>();
 				fields.AddRange(d.GetFields());
 				// put fields in same order each time
                 //{{Lucene.Net-2.9.1}} No, don't change the order of the fields
 				//SupportClass.CollectionsHelper.Sort(fields, fieldNameComparator);
 				
 				Document d1 = new Document();
-				d1.SetBoost(d.GetBoost());
+				d1.SetBoost(d.Boost);
 				for (int i = 0; i < fields.Count; i++)
 				{
-					d1.Add((Fieldable) fields[i]);
+					d1.Add((IFieldable) fields[i]);
 				}
 				w.AddDocument(d1);
 				// System.out.println("indexing "+d1);
@@ -318,10 +318,10 @@ namespace Lucene.Net.Index
 		
 		public static void  VerifyEquals(IndexReader r1, IndexReader r2, System.String idField)
 		{
-			Assert.AreEqual(r1.NumDocs(), r2.NumDocs());
-			bool hasDeletes = !(r1.MaxDoc() == r2.MaxDoc() && r1.NumDocs() == r1.MaxDoc());
+			Assert.AreEqual(r1.NumDocs, r2.NumDocs);
+			bool hasDeletes = !(r1.MaxDoc == r2.MaxDoc && r1.NumDocs == r1.MaxDoc);
 			
-			int[] r2r1 = new int[r2.MaxDoc()]; // r2 id to r1 id mapping
+			int[] r2r1 = new int[r2.MaxDoc]; // r2 id to r1 id mapping
 			
 			TermDocs termDocs1 = r1.TermDocs();
 			TermDocs termDocs2 = r2.TermDocs();
@@ -403,8 +403,8 @@ namespace Lucene.Net.Index
 			TermEnum termEnum2 = r2.Terms(new Term("", ""));
 			
 			// pack both doc and freq into single element for easy sorting
-			long[] info1 = new long[r1.NumDocs()];
-			long[] info2 = new long[r2.NumDocs()];
+			long[] info1 = new long[r1.NumDocs];
+			long[] info2 = new long[r2.NumDocs];
 			
 			for (; ; )
 			{
@@ -482,8 +482,8 @@ namespace Lucene.Net.Index
 			var ff1 = d1.GetFields();
 			var ff2 = d2.GetFields();
 
-		    ff1.OrderBy(x => x.Name());
-		    ff2.OrderBy(x => x.Name());
+		    ff1.OrderBy(x => x.Name);
+		    ff2.OrderBy(x => x.Name);
 			
 			if (ff1.Count != ff2.Count)
 			{
@@ -495,17 +495,17 @@ namespace Lucene.Net.Index
 			
 			for (int i = 0; i < ff1.Count; i++)
 			{
-				Fieldable f1 = (Fieldable) ff1[i];
-				Fieldable f2 = (Fieldable) ff2[i];
-				if (f1.IsBinary())
+				IFieldable f1 = (IFieldable) ff1[i];
+				IFieldable f2 = (IFieldable) ff2[i];
+				if (f1.IsBinary)
 				{
-					System.Diagnostics.Debug.Assert(f2.IsBinary());
+					System.Diagnostics.Debug.Assert(f2.IsBinary);
 					//TODO
 				}
 				else
 				{
-					System.String s1 = f1.StringValue();
-					System.String s2 = f2.StringValue();
+					System.String s1 = f1.StringValue;
+					System.String s2 = f2.StringValue;
 					if (!s1.Equals(s2))
 					{
 						// print out whole doc on error
@@ -568,8 +568,8 @@ namespace Lucene.Net.Index
 							Assert.AreEqual(pos1[k], pos2[k]);
 							if (offsets1 != null)
 							{
-								Assert.AreEqual(offsets1[k].GetStartOffset(), offsets2[k].GetStartOffset());
-								Assert.AreEqual(offsets1[k].GetEndOffset(), offsets2[k].GetEndOffset());
+								Assert.AreEqual(offsets1[k].StartOffset, offsets2[k].StartOffset);
+								Assert.AreEqual(offsets1[k].EndOffset, offsets2[k].EndOffset);
 							}
 						}
 					}
@@ -742,7 +742,7 @@ namespace Lucene.Net.Index
 				
 				for (int i = 0; i < fields.Count; i++)
 				{
-					d.Add((Fieldable) fields[i]);
+					d.Add((IFieldable) fields[i]);
 				}
 				w.UpdateDocument(Lucene.Net.Index.TestStressIndexing2.idTerm.CreateTerm(idString), d);
 				// System.out.println("indexing "+d);

Modified: incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs Mon Mar 12 22:29:26 2012
@@ -120,14 +120,14 @@ namespace Lucene.Net.Index
 					TestToken token = tokens[tokenUpto++] = new TestToken(this);
 					token.text = testTerms[i];
 					token.pos = positions[i][j];
-					token.startOffset = offsets[i][j].GetStartOffset();
-					token.endOffset = offsets[i][j].GetEndOffset();
+					token.startOffset = offsets[i][j].StartOffset;
+					token.endOffset = offsets[i][j].EndOffset;
 				}
 			}
 			System.Array.Sort(tokens);
 			
 			IndexWriter writer = new IndexWriter(dir, new MyAnalyzer(this), true, IndexWriter.MaxFieldLength.LIMITED);
-			writer.SetUseCompoundFile(false);
+			writer.UseCompoundFile = false;
 			Document doc = new Document();
 			for (int i = 0; i < testFields.Length; i++)
 			{
@@ -195,11 +195,11 @@ namespace Lucene.Net.Index
 					offsetAtt.SetOffset(testToken.startOffset, testToken.endOffset);
 					if (tokenUpto > 1)
 					{
-						posIncrAtt.SetPositionIncrement(testToken.pos - Enclosing_Instance.tokens[tokenUpto - 2].pos);
+						posIncrAtt.PositionIncrement = testToken.pos - Enclosing_Instance.tokens[tokenUpto - 2].pos;
 					}
 					else
 					{
-						posIncrAtt.SetPositionIncrement(testToken.pos + 1);
+						posIncrAtt.PositionIncrement = testToken.pos + 1;
 					}
 					return true;
 				}
@@ -355,7 +355,7 @@ namespace Lucene.Net.Index
 			Assert.IsTrue(reader != null);
 			SortedTermVectorMapper mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
 			reader.Get(0, mapper);
-			var set_Renamed = mapper.GetTermVectorEntrySet();
+			var set_Renamed = mapper.TermVectorEntrySet;
 			Assert.IsTrue(set_Renamed != null, "set is null and it shouldn't be");
 			//three fields, 4 terms, all terms are the same
 			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
@@ -364,13 +364,13 @@ namespace Lucene.Net.Index
 			{
 				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
 				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
-				Assert.IsTrue(tve.GetOffsets() != null, "tve.getOffsets() is null and it shouldn't be");
-				Assert.IsTrue(tve.GetPositions() != null, "tve.getPositions() is null and it shouldn't be");
+				Assert.IsTrue(tve.Offsets != null, "tve.getOffsets() is null and it shouldn't be");
+				Assert.IsTrue(tve.Positions != null, "tve.getPositions() is null and it shouldn't be");
 			}
 			
 			mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
 			reader.Get(1, mapper);
-			set_Renamed = mapper.GetTermVectorEntrySet();
+			set_Renamed = mapper.TermVectorEntrySet;
 			Assert.IsTrue(set_Renamed != null, "set is null and it shouldn't be");
 			//three fields, 4 terms, all terms are the same
 			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
@@ -379,14 +379,14 @@ namespace Lucene.Net.Index
 			{
 				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
 				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
-				Assert.IsTrue(tve.GetOffsets() != null, "tve.getOffsets() is null and it shouldn't be");
-				Assert.IsTrue(tve.GetPositions() != null, "tve.getPositions() is null and it shouldn't be");
+				Assert.IsTrue(tve.Offsets != null, "tve.getOffsets() is null and it shouldn't be");
+				Assert.IsTrue(tve.Positions != null, "tve.getPositions() is null and it shouldn't be");
 			}
 			
 			
 			FieldSortedTermVectorMapper fsMapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
 			reader.Get(0, fsMapper);
-			var map = fsMapper.GetFieldToTerms();
+			var map = fsMapper.FieldToTerms;
 			Assert.IsTrue(map.Count == testFields.Length, "map Size: " + map.Count + " is not: " + testFields.Length);
 			for (var iterator = map.GetEnumerator(); iterator.MoveNext(); )
 			{
@@ -399,27 +399,27 @@ namespace Lucene.Net.Index
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
 					//Check offsets and positions.
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
-					System.String field = tve.GetField();
+					System.String field = tve.Field;
 					if (field.Equals(testFields[0]))
 					{
 						//should have offsets
 						
-						Assert.IsTrue(tve.GetOffsets() != null, "tve.getOffsets() is null and it shouldn't be");
-						Assert.IsTrue(tve.GetPositions() != null, "tve.getPositions() is null and it shouldn't be");
+						Assert.IsTrue(tve.Offsets != null, "tve.getOffsets() is null and it shouldn't be");
+						Assert.IsTrue(tve.Positions != null, "tve.getPositions() is null and it shouldn't be");
 					}
 					else if (field.Equals(testFields[1]))
 					{
 						//should not have offsets
 						
-						Assert.IsTrue(tve.GetOffsets() == null, "tve.getOffsets() is not null and it shouldn't be");
-						Assert.IsTrue(tve.GetPositions() == null, "tve.getPositions() is not null and it shouldn't be");
+						Assert.IsTrue(tve.Offsets == null, "tve.getOffsets() is not null and it shouldn't be");
+						Assert.IsTrue(tve.Positions == null, "tve.getPositions() is not null and it shouldn't be");
 					}
 				}
 			}
 			//Try mapper that ignores offs and positions
 			fsMapper = new FieldSortedTermVectorMapper(true, true, new TermVectorEntryFreqSortedComparator());
 			reader.Get(0, fsMapper);
-			map = fsMapper.GetFieldToTerms();
+			map = fsMapper.FieldToTerms;
 			Assert.IsTrue(map.Count == testFields.Length, "map Size: " + map.Count + " is not: " + testFields.Length);
 			for (var iterator = map.GetEnumerator(); iterator.MoveNext(); )
 			{
@@ -432,20 +432,20 @@ namespace Lucene.Net.Index
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
 					//Check offsets and positions.
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
-					System.String field = tve.GetField();
+					System.String field = tve.Field;
 					if (field.Equals(testFields[0]))
 					{
 						//should have offsets
 						
-						Assert.IsTrue(tve.GetOffsets() == null, "tve.getOffsets() is null and it shouldn't be");
-						Assert.IsTrue(tve.GetPositions() == null, "tve.getPositions() is null and it shouldn't be");
+						Assert.IsTrue(tve.Offsets == null, "tve.getOffsets() is null and it shouldn't be");
+						Assert.IsTrue(tve.Positions == null, "tve.getPositions() is null and it shouldn't be");
 					}
 					else if (field.Equals(testFields[1]))
 					{
 						//should not have offsets
 						
-						Assert.IsTrue(tve.GetOffsets() == null, "tve.getOffsets() is not null and it shouldn't be");
-						Assert.IsTrue(tve.GetPositions() == null, "tve.getPositions() is not null and it shouldn't be");
+						Assert.IsTrue(tve.Offsets == null, "tve.getOffsets() is not null and it shouldn't be");
+						Assert.IsTrue(tve.Positions == null, "tve.getPositions() is not null and it shouldn't be");
 					}
 				}
 			}

Modified: incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs Mon Mar 12 22:29:26 2012
@@ -160,8 +160,8 @@ namespace Lucene.Net.Index
 				writer.SetMaxBufferedDocs(2);
 
 			    IndexReader reader = IndexReader.Open(directory, true);
-				Assert.IsTrue(reader.IsOptimized());
-				Assert.AreEqual(expectedDocCount, reader.NumDocs());
+				Assert.IsTrue(reader.IsOptimized);
+				Assert.AreEqual(expectedDocCount, reader.NumDocs);
 				reader.Close();
 			}
 			writer.Close();

Modified: incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs Mon Mar 12 22:29:26 2012
@@ -56,7 +56,7 @@ namespace Lucene.Net.Index
 			for (System.Collections.IEnumerator iterator = commits.GetEnumerator(); iterator.MoveNext(); )
 			{
 				IndexCommit commit = (IndexCommit) iterator.Current;
-                System.Collections.Generic.IDictionary<string, string> ud = commit.GetUserData();
+                System.Collections.Generic.IDictionary<string, string> ud = commit.UserData;
 				if (ud.Count > 0)
 					if (((System.String) ud["index"]).EndsWith(ids))
 						last = commit;
@@ -93,7 +93,7 @@ namespace Lucene.Net.Index
 			IndexReader r = IndexReader.Open(dir, true);
 			
 			//Perhaps not the most efficient approach but meets our needs here.
-			for (int i = 0; i < r.MaxDoc(); i++)
+			for (int i = 0; i < r.MaxDoc; i++)
 			{
 				if (!r.IsDeleted(i))
 				{
@@ -185,7 +185,7 @@ namespace Lucene.Net.Index
 				for (System.Collections.IEnumerator iterator = commits.GetEnumerator(); iterator.MoveNext(); )
 				{
 					IndexCommit commit = (IndexCommit) iterator.Current;
-                    System.Collections.Generic.IDictionary<string, string> userData = commit.GetUserData();
+                    System.Collections.Generic.IDictionary<string, string> userData = commit.UserData;
 					if (userData.Count > 0)
 					{
 						// Label for a commit point is "Records 1-30"
@@ -252,7 +252,7 @@ namespace Lucene.Net.Index
 				// should not work:
 				new IndexWriter(dir, new WhitespaceAnalyzer(), new DeleteLastCommitPolicy(this), MaxFieldLength.UNLIMITED).Close();
 			    IndexReader r = IndexReader.Open(dir, true);
-				Assert.AreEqual(100, r.NumDocs());
+				Assert.AreEqual(100, r.NumDocs);
 				r.Close();
 			}
 		}

Modified: incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs Mon Mar 12 22:29:26 2012
@@ -134,14 +134,14 @@ namespace Lucene.Net.Index
 				IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 				writer1.SetMaxBufferedDocs(3);
 				writer1.SetMergeFactor(2);
-				((ConcurrentMergeScheduler) writer1.GetMergeScheduler()).SetSuppressExceptions();
+				((ConcurrentMergeScheduler) writer1.MergeScheduler).SetSuppressExceptions();
 				
 				IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 				// Intentionally use different params so flush/merge
 				// happen @ different times
 				writer2.SetMaxBufferedDocs(2);
 				writer2.SetMergeFactor(3);
-				((ConcurrentMergeScheduler) writer2.GetMergeScheduler()).SetSuppressExceptions();
+				((ConcurrentMergeScheduler) writer2.MergeScheduler).SetSuppressExceptions();
 				
 				Update(writer1);
 				Update(writer2);
@@ -228,8 +228,8 @@ namespace Lucene.Net.Index
 					r1 = IndexReader.Open(dir1, true);
 				    r2 = IndexReader.Open(dir2, true);
 				}
-				if (r1.NumDocs() != r2.NumDocs())
-					throw new System.SystemException("doc counts differ: r1=" + r1.NumDocs() + " r2=" + r2.NumDocs());
+				if (r1.NumDocs != r2.NumDocs)
+					throw new System.SystemException("doc counts differ: r1=" + r1.NumDocs + " r2=" + r2.NumDocs);
 				r1.Close();
 				r2.Close();
 			}

Modified: incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs Mon Mar 12 22:29:26 2012
@@ -85,13 +85,13 @@ namespace Lucene.Net.QueryParsers
 			Assert.AreEqual("\"(multi multi2) foo\"^2.0", qp.Parse("\"multi foo\"^2").ToString());
 			
 			// phrase after changing default slop
-			qp.SetPhraseSlop(99);
+			qp.PhraseSlop = 99;
 			Assert.AreEqual("\"(multi multi2) foo\"~99 bar", qp.Parse("\"multi foo\" bar").ToString());
 			Assert.AreEqual("\"(multi multi2) foo\"~99 \"foo bar\"~2", qp.Parse("\"multi foo\" \"foo bar\"~2").ToString());
-			qp.SetPhraseSlop(0);
+			qp.PhraseSlop = 0;
 			
 			// non-default operator:
-			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
+			qp.DefaultOperator = QueryParser.AND_OPERATOR;
 			Assert.AreEqual("+(multi multi2) +foo", qp.Parse("multi foo").ToString());
 		}
 		
@@ -100,7 +100,7 @@ namespace Lucene.Net.QueryParsers
 		{
 			
 			DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer(this));
-			qp.SetPhraseSlop(99); // modified default slop
+			qp.PhraseSlop = 99; // modified default slop
 			
 			// direct call to (super's) getFieldQuery to demonstrate differnce
 			// between phrase and multiphrase with modified default slop
@@ -196,7 +196,7 @@ namespace Lucene.Net.QueryParsers
 					termAtt.SetTermBuffer("multi" + (Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken + 1));
 					offsetAtt.SetOffset(prevStartOffset, prevEndOffset);
 					typeAtt.SetType(prevType);
-					posIncrAtt.SetPositionIncrement(0);
+					posIncrAtt.PositionIncrement = 0;
 					Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken--;
 					return true;
 				}
@@ -298,12 +298,12 @@ namespace Lucene.Net.QueryParsers
 					}
 					else if (termAtt.Term().Equals("quick"))
 					{
-						posIncrAtt.SetPositionIncrement(2);
+						posIncrAtt.PositionIncrement = 2;
 						return true;
 					}
 					else
 					{
-						posIncrAtt.SetPositionIncrement(1);
+						posIncrAtt.PositionIncrement = 1;
 						return true;
 					}
 				}

Modified: incubator/lucene.net/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs (original)
+++ incubator/lucene.net/trunk/test/core/QueryParser/TestMultiFieldQueryParser.cs Mon Mar 12 22:29:26 2012
@@ -126,7 +126,7 @@ namespace Lucene.Net.QueryParsers
 			Assert.AreEqual("(b:one t:one) f:two", q.ToString());
 			
 			// AND mode:
-			mfqp.SetDefaultOperator(QueryParser.AND_OPERATOR);
+			mfqp.DefaultOperator = QueryParser.AND_OPERATOR;
 			q = mfqp.Parse("one two");
 			Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
 			q = mfqp.Parse("\"aa bb cc\" \"dd ee\"");
@@ -325,7 +325,7 @@ namespace Lucene.Net.QueryParsers
 			iw.Close();
 			
 			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Util.Version.LUCENE_CURRENT, new []{"body"}, analyzer);
-			mfqp.SetDefaultOperator(QueryParser.Operator.AND);
+			mfqp.DefaultOperator = QueryParser.Operator.AND;
 			Query q = mfqp.Parse("the footest");
 			IndexSearcher is_Renamed = new IndexSearcher(ramDir, true);
 			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).ScoreDocs;

Modified: incubator/lucene.net/trunk/test/core/QueryParser/TestQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/QueryParser/TestQueryParser.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/QueryParser/TestQueryParser.cs (original)
+++ incubator/lucene.net/trunk/test/core/QueryParser/TestQueryParser.cs Mon Mar 12 22:29:26 2012
@@ -216,7 +216,7 @@ namespace Lucene.Net.QueryParsers
 			if (a == null)
 				a = new SimpleAnalyzer();
             QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
-			qp.SetDefaultOperator(QueryParser.OR_OPERATOR);
+			qp.DefaultOperator = QueryParser.OR_OPERATOR;
 			return qp;
 		}
 		
@@ -257,8 +257,8 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  AssertWildcardQueryEquals(System.String query, bool lowercase, System.String result, bool allowLeadingWildcard)
 		{
 			QueryParser qp = GetParser(null);
-			qp.SetLowercaseExpandedTerms(lowercase);
-			qp.SetAllowLeadingWildcard(allowLeadingWildcard);
+			qp.LowercaseExpandedTerms = lowercase;
+			qp.AllowLeadingWildcard = allowLeadingWildcard;
 			Query q = qp.Parse(query);
 			System.String s = q.ToString("field");
 			if (!s.Equals(result))
@@ -288,7 +288,7 @@ namespace Lucene.Net.QueryParsers
 			if (a == null)
 				a = new SimpleAnalyzer();
             QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
-			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
+			qp.DefaultOperator = QueryParser.AND_OPERATOR;
 			return qp.Parse(query);
 		}
 		
@@ -375,11 +375,11 @@ namespace Lucene.Net.QueryParsers
 
             QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new StandardAnalyzer(Util.Version.LUCENE_CURRENT));
 			// make sure OR is the default:
-			Assert.AreEqual(QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
-			qp.SetDefaultOperator(QueryParser.AND_OPERATOR);
-			Assert.AreEqual(QueryParser.AND_OPERATOR, qp.GetDefaultOperator());
-			qp.SetDefaultOperator(QueryParser.OR_OPERATOR);
-			Assert.AreEqual(QueryParser.OR_OPERATOR, qp.GetDefaultOperator());
+			Assert.AreEqual(QueryParser.OR_OPERATOR, qp.DefaultOperator);
+			qp.DefaultOperator = QueryParser.AND_OPERATOR;
+			Assert.AreEqual(QueryParser.AND_OPERATOR, qp.DefaultOperator);
+			qp.DefaultOperator = QueryParser.OR_OPERATOR;
+			Assert.AreEqual(QueryParser.OR_OPERATOR, qp.DefaultOperator);
 		}
 		
 		[Test]
@@ -432,11 +432,11 @@ namespace Lucene.Net.QueryParsers
 			Assert.IsTrue(GetQuery("term~", null) is FuzzyQuery);
 			Assert.IsTrue(GetQuery("term~0.7", null) is FuzzyQuery);
 			FuzzyQuery fq = (FuzzyQuery) GetQuery("term~0.7", null);
-			Assert.AreEqual(0.7f, fq.GetMinSimilarity(), 0.1f);
-			Assert.AreEqual(FuzzyQuery.defaultPrefixLength, fq.GetPrefixLength());
+			Assert.AreEqual(0.7f, fq.MinSimilarity, 0.1f);
+			Assert.AreEqual(FuzzyQuery.defaultPrefixLength, fq.PrefixLength);
 			fq = (FuzzyQuery) GetQuery("term~", null);
-			Assert.AreEqual(0.5f, fq.GetMinSimilarity(), 0.1f);
-			Assert.AreEqual(FuzzyQuery.defaultPrefixLength, fq.GetPrefixLength());
+			Assert.AreEqual(0.5f, fq.MinSimilarity, 0.1f);
+			Assert.AreEqual(FuzzyQuery.defaultPrefixLength, fq.PrefixLength);
 			
 			AssertParseException("term~1.1"); // value > 1, throws exception
 			
@@ -505,7 +505,7 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  TestLeadingWildcardType()
 		{
 			QueryParser qp = GetParser(null);
-			qp.SetAllowLeadingWildcard(true);
+			qp.AllowLeadingWildcard = true;
 			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("t*erm*").GetType());
 			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("?term*").GetType());
 			Assert.AreEqual(typeof(WildcardQuery), qp.Parse("*term*").GetType());
@@ -544,11 +544,11 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  TestRange()
 		{
 			AssertQueryEquals("[ a TO z]", null, "[a TO z]");
-			Assert.AreEqual(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery) GetQuery("[ a TO z]", null)).GetRewriteMethod());
+            Assert.AreEqual(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)GetQuery("[ a TO z]", null)).QueryRewriteMethod);
 
             QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
-			qp.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
-			Assert.AreEqual(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery) qp.Parse("[ a TO z]")).GetRewriteMethod());
+			qp.MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
+            Assert.AreEqual(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE, ((TermRangeQuery)qp.Parse("[ a TO z]")).QueryRewriteMethod);
 			
 			AssertQueryEquals("[ a TO z ]", null, "[a TO z]");
 			AssertQueryEquals("{ a TO z}", null, "{a TO z}");
@@ -578,7 +578,7 @@ namespace Lucene.Net.QueryParsers
 			// RuleBasedCollator.  However, the Arabic Locale seems to order the Farsi
 			// characters properly.
 			System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("ar").CompareInfo;
-			qp.SetRangeCollator(c);
+			qp.RangeCollator = c;
 			
 			// Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
 			// orders the U+0698 character before the U+0633 character, so the single
@@ -587,7 +587,7 @@ namespace Lucene.Net.QueryParsers
 			// supported).
 			
 			// Test ConstantScoreRangeQuery
-			qp.SetMultiTermRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+			qp.MultiTermRewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
 			ScoreDoc[] result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).ScoreDocs;
 			Assert.AreEqual(0, result.Length, "The index Term should not be included.");
 			
@@ -595,7 +595,7 @@ namespace Lucene.Net.QueryParsers
 			Assert.AreEqual(1, result.Length, "The index Term should be included.");
 			
 			// Test TermRangeQuery
-			qp.SetMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
+			qp.MultiTermRewriteMethod = MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE;
 			result = is_Renamed.Search(qp.Parse("[ \u062F TO \u0698 ]"), null, 1000).ScoreDocs;
 			Assert.AreEqual(0, result.Length, "The index Term should not be included.");
 			
@@ -898,10 +898,10 @@ namespace Lucene.Net.QueryParsers
 			Assert.IsNotNull(q);
 			q = qp.Parse("\"hello\"^2.0");
 			Assert.IsNotNull(q);
-			Assert.AreEqual(q.GetBoost(), (float) 2.0, (float) 0.5);
+			Assert.AreEqual(q.Boost, (float) 2.0, (float) 0.5);
 			q = qp.Parse("hello^2.0");
 			Assert.IsNotNull(q);
-			Assert.AreEqual(q.GetBoost(), (float) 2.0, (float) 0.5);
+			Assert.AreEqual(q.Boost, (float) 2.0, (float) 0.5);
 			q = qp.Parse("\"on\"^1.0");
 			Assert.IsNotNull(q);
 
@@ -910,7 +910,7 @@ namespace Lucene.Net.QueryParsers
 			// "the" is a stop word so the result is an empty query:
 			Assert.IsNotNull(q);
 			Assert.AreEqual("", q.ToString());
-			Assert.AreEqual(1.0f, q.GetBoost(), 0.01f);
+			Assert.AreEqual(1.0f, q.Boost, 0.01f);
 		}
 		
 		public virtual void  AssertParseException(System.String queryString)
@@ -1019,36 +1019,36 @@ namespace Lucene.Net.QueryParsers
 			TermQuery tq;
 			
 			tq = (TermQuery) qp.Parse("foo:zoo*");
-			Assert.AreEqual("zoo", tq.GetTerm().Text());
+			Assert.AreEqual("zoo", tq.Term.Text());
 			Assert.AreEqual(2, type[0]);
 			
 			tq = (TermQuery) qp.Parse("foo:zoo*^2");
-			Assert.AreEqual("zoo", tq.GetTerm().Text());
+			Assert.AreEqual("zoo", tq.Term.Text());
 			Assert.AreEqual(2, type[0]);
-			Assert.AreEqual(tq.GetBoost(), 2, 0);
+			Assert.AreEqual(tq.Boost, 2, 0);
 			
 			tq = (TermQuery) qp.Parse("foo:*");
-			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual("*", tq.Term.Text());
 			Assert.AreEqual(1, type[0]); // could be a valid prefix query in the future too
 			
 			tq = (TermQuery) qp.Parse("foo:*^2");
-			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual("*", tq.Term.Text());
 			Assert.AreEqual(1, type[0]);
-			Assert.AreEqual(tq.GetBoost(), 2, 0);
+			Assert.AreEqual(tq.Boost, 2, 0);
 			
 			tq = (TermQuery) qp.Parse("*:foo");
-			Assert.AreEqual("*", tq.GetTerm().Field());
-			Assert.AreEqual("foo", tq.GetTerm().Text());
+			Assert.AreEqual("*", tq.Term.Field());
+			Assert.AreEqual("foo", tq.Term.Text());
 			Assert.AreEqual(3, type[0]);
 			
 			tq = (TermQuery) qp.Parse("*:*");
-			Assert.AreEqual("*", tq.GetTerm().Field());
-			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual("*", tq.Term.Field());
+			Assert.AreEqual("*", tq.Term.Text());
 			Assert.AreEqual(1, type[0]); // could be handled as a prefix query in the future
 			
 			tq = (TermQuery) qp.Parse("(*:*)");
-			Assert.AreEqual("*", tq.GetTerm().Field());
-			Assert.AreEqual("*", tq.GetTerm().Text());
+			Assert.AreEqual("*", tq.Term.Field());
+			Assert.AreEqual("*", tq.Term.Text());
 			Assert.AreEqual(1, type[0]);
 		}
 		
@@ -1078,7 +1078,7 @@ namespace Lucene.Net.QueryParsers
             QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "a",
                                              new StopAnalyzer(Version.LUCENE_CURRENT,
                                                               StopFilter.MakeStopSet(new[] {"the", "in", "are", "this"})));
-            qp.SetEnablePositionIncrements(true);
+            qp.SetEnablePositionIncrements(new QueryParser.SetEnablePositionIncrementsParams(true));
             System.String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
             //               0         2                      5           7  8
             int[] expectedPositions = new int[] {1, 3, 4, 6, 9};
@@ -1108,7 +1108,7 @@ namespace Lucene.Net.QueryParsers
 		private void  AssertHits(int expected, System.String query, IndexSearcher is_Renamed)
 		{
             QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "date", new WhitespaceAnalyzer());
-			qp.SetLocale(new System.Globalization.CultureInfo("en-US"));
+			qp.Locale = new System.Globalization.CultureInfo("en-US");
 			Query q = qp.Parse(query);
 			ScoreDoc[] hits = is_Renamed.Search(q, null, 1000).ScoreDocs;
 			Assert.AreEqual(expected, hits.Length);
@@ -1167,7 +1167,7 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  TestProtectedCtors()
 		{
             // If the return type is not null, then fail the assertion.
-			if (typeof(QueryParser).GetConstructor(new System.Type[]{typeof(CharStream)}) != null)
+			if (typeof(QueryParser).GetConstructor(new System.Type[]{typeof(ICharStream)}) != null)
             {
                 // Fail the assertion.
 				Assert.Fail("please switch public QueryParser(CharStream) to be protected");

Modified: incubator/lucene.net/trunk/test/core/Search/CheckHits.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Search/CheckHits.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Search/CheckHits.cs (original)
+++ incubator/lucene.net/trunk/test/core/Search/CheckHits.cs Mon Mar 12 22:29:26 2012
@@ -183,7 +183,7 @@ namespace Lucene.Net.Search
 			System.Collections.ArrayList actual = new System.Collections.ArrayList();
 			for (int i = 0; i < hits.Length; i++)
 			{
-				CollectionsHelper.AddIfNotContains(actual, hits[i].doc);
+				CollectionsHelper.AddIfNotContains(actual, hits[i].Doc);
 			}
             actual.Sort();
 			
@@ -198,7 +198,7 @@ namespace Lucene.Net.Search
 			Assert.AreEqual(hits.Length, results.Length, mes + " nr of hits");
 			for (int i = 0; i < results.Length; i++)
 			{
-				Assert.AreEqual(results[i], hits[i].doc, mes + " doc nrs for hit " + i);
+				Assert.AreEqual(results[i], hits[i].Doc, mes + " doc nrs for hit " + i);
 			}
 		}
 		
@@ -222,14 +222,14 @@ namespace Lucene.Net.Search
 			}
 			for (int i = 0; i < hits1.Length; i++)
 			{
-				if (hits1[i].doc != hits2[i].doc)
+				if (hits1[i].Doc != hits2[i].Doc)
 				{
 					Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
 				}
 				
-				if ((hits1[i].doc != hits2[i].doc) || System.Math.Abs(hits1[i].score - hits2[i].score) > scoreTolerance)
+				if ((hits1[i].Doc != hits2[i].Doc) || System.Math.Abs(hits1[i].Score - hits2[i].Score) > scoreTolerance)
 				{
-					Assert.Fail("Hit " + i + ", doc nrs " + hits1[i].doc + " and " + hits2[i].doc + "\nunequal       : " + hits1[i].score + "\n           and: " + hits2[i].score + "\nfor query:" + query.ToString());
+					Assert.Fail("Hit " + i + ", doc nrs " + hits1[i].Doc + " and " + hits2[i].Doc + "\nunequal       : " + hits1[i].Score + "\n           and: " + hits2[i].Score + "\nfor query:" + query.ToString());
 				}
 			}
 		}
@@ -252,7 +252,7 @@ namespace Lucene.Net.Search
 				sb.Append("hit=").Append(i).Append(':');
 				if (i < len1)
 				{
-					sb.Append(" doc").Append(hits1[i].doc).Append('=').Append(hits1[i].score);
+					sb.Append(" doc").Append(hits1[i].Doc).Append('=').Append(hits1[i].Score);
 				}
 				else
 				{
@@ -261,7 +261,7 @@ namespace Lucene.Net.Search
 				sb.Append(",\t");
 				if (i < len2)
 				{
-					sb.Append(" doc").Append(hits2[i].doc).Append('=').Append(hits2[i].score);
+					sb.Append(" doc").Append(hits2[i].Doc).Append('=').Append(hits2[i].Score);
 				}
 				sb.Append('\n');
 			}
@@ -282,9 +282,9 @@ namespace Lucene.Net.Search
 				sb.Append('\t');
 				sb.Append(i);
 				sb.Append(") doc=");
-				sb.Append(docs.ScoreDocs[i].doc);
+				sb.Append(docs.ScoreDocs[i].Doc);
 				sb.Append("\tscore=");
-				sb.Append(docs.ScoreDocs[i].score);
+				sb.Append(docs.ScoreDocs[i].Score);
 				sb.Append('\n');
 			}
 			return sb.ToString();

Modified: incubator/lucene.net/trunk/test/core/Search/Function/FunctionTestSetup.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Search/Function/FunctionTestSetup.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Search/Function/FunctionTestSetup.cs (original)
+++ incubator/lucene.net/trunk/test/core/Search/Function/FunctionTestSetup.cs Mon Mar 12 22:29:26 2012
@@ -16,6 +16,7 @@
  */
 
 using System;
+using Lucene.Net.Documents;
 using Lucene.Net.Support;
 using NUnit.Framework;
 
@@ -23,7 +24,6 @@ using Analyzer = Lucene.Net.Analysis.Ana
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Fieldable = Lucene.Net.Documents.Fieldable;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
@@ -118,23 +118,23 @@ namespace Lucene.Net.Search.Function
 		private void  AddDoc(IndexWriter iw, int i)
 		{
 			Document d = new Document();
-			Fieldable f;
+			IFieldable f;
 			int scoreAndID = i + 1;
 			
 			f = new Field(ID_FIELD, Id2String(scoreAndID), Field.Store.YES, Field.Index.NOT_ANALYZED); // for debug purposes
-			f.SetOmitNorms(true);
+			f.OmitNorms = true;
 			d.Add(f);
 			
 			f = new Field(TEXT_FIELD, "text of doc" + scoreAndID + TextLine(i), Field.Store.NO, Field.Index.ANALYZED); // for regular search
-			f.SetOmitNorms(true);
+			f.OmitNorms = true;
 			d.Add(f);
 			
 			f = new Field(INT_FIELD, "" + scoreAndID, Field.Store.NO, Field.Index.NOT_ANALYZED); // for function scoring
-			f.SetOmitNorms(true);
+			f.OmitNorms = true;
 			d.Add(f);
 			
 			f = new Field(FLOAT_FIELD, scoreAndID + ".000", Field.Store.NO, Field.Index.NOT_ANALYZED); // for function scoring
-			f.SetOmitNorms(true);
+			f.OmitNorms = true;
 			d.Add(f);
 			
 			iw.AddDocument(d);

Modified: incubator/lucene.net/trunk/test/core/Search/Function/TestCustomScoreQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Search/Function/TestCustomScoreQuery.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Search/Function/TestCustomScoreQuery.cs (original)
+++ incubator/lucene.net/trunk/test/core/Search/Function/TestCustomScoreQuery.cs Mon Mar 12 22:29:26 2012
@@ -216,7 +216,7 @@ namespace Lucene.Net.Search.Function
 
                 public override float CustomScore(int doc, float subScore, float valSrcScore)
                 {
-                    Assert.IsTrue(doc <= reader.MaxDoc());
+                    Assert.IsTrue(doc <= reader.MaxDoc);
                     return (float)values[doc];
                 }
             }
@@ -240,8 +240,8 @@ namespace Lucene.Net.Search.Function
             Assert.AreEqual(N_DOCS, hits.TotalHits);
             for(int i=0;i<N_DOCS;i++) 
             {
-                int doc = hits.ScoreDocs[i].doc;
-                float score = hits.ScoreDocs[i].score;
+                int doc = hits.ScoreDocs[i].Doc;
+                float score = hits.ScoreDocs[i].Score;
                 Assert.AreEqual(score, (float)1 + (4 * doc) % N_DOCS, 0.0001, "doc=" + doc);
             }
             s.Close();
@@ -263,25 +263,25 @@ namespace Lucene.Net.Search.Function
 			
 			// custom query, that should score the same as q1.
 			CustomScoreQuery q2CustomNeutral = new CustomScoreQuery(q1);
-			q2CustomNeutral.SetBoost(boost);
+			q2CustomNeutral.Boost = boost;
 			Log(q2CustomNeutral);
 			
 			// custom query, that should (by default) multiply the scores of q1 by that of the field
 			CustomScoreQuery q3CustomMul = new CustomScoreQuery(q1, qValSrc);
 			q3CustomMul.SetStrict(true);
-			q3CustomMul.SetBoost(boost);
+			q3CustomMul.Boost = boost;
 			Log(q3CustomMul);
 			
 			// custom query, that should add the scores of q1 to that of the field
 			CustomScoreQuery q4CustomAdd = new CustomAddQuery(q1, qValSrc);
 			q4CustomAdd.SetStrict(true);
-			q4CustomAdd.SetBoost(boost);
+			q4CustomAdd.Boost = boost;
 			Log(q4CustomAdd);
 			
 			// custom query, that multiplies and adds the field score to that of q1
 			CustomScoreQuery q5CustomMulAdd = new CustomMulAddQuery(q1, qValSrc, qValSrc);
 			q5CustomMulAdd.SetStrict(true);
-			q5CustomMulAdd.SetBoost(boost);
+			q5CustomMulAdd.Boost = boost;
 			Log(q5CustomMulAdd);
 			
 			// do al the searches 
@@ -320,7 +320,7 @@ namespace Lucene.Net.Search.Function
 				int doc = x;
 				Log("doc = " + doc);
 				
-				float fieldScore = ExpectedFieldScore(s.GetIndexReader().Document(doc).Get(ID_FIELD));
+				float fieldScore = ExpectedFieldScore(s.IndexReader.Document(doc).Get(ID_FIELD));
 				Log("fieldScore = " + fieldScore);
 				Assert.IsTrue(fieldScore > 0, "fieldScore should not be 0");
 				
@@ -360,7 +360,7 @@ namespace Lucene.Net.Search.Function
 			System.Collections.Hashtable h = new System.Collections.Hashtable();
 			for (int i = 0; i < td.TotalHits; i++)
 			{
-				h[(System.Int32) td.ScoreDocs[i].doc] = (float) td.ScoreDocs[i].score;
+				h[(System.Int32) td.ScoreDocs[i].Doc] = (float) td.ScoreDocs[i].Score;
 			}
 			return h;
 		}



Mime
View raw message