lucenenet-commits mailing list archives

From ccurr...@apache.org
Subject svn commit: r1299911 [9/14] - in /incubator/lucene.net/trunk: src/core/ src/core/Analysis/ src/core/Analysis/Standard/ src/core/Analysis/Tokenattributes/ src/core/Document/ src/core/Index/ src/core/Messages/ src/core/QueryParser/ src/core/Search/ src/c...
Date Mon, 12 Mar 2012 22:29:37 GMT
Modified: incubator/lucene.net/trunk/src/core/Util/PriorityQueue.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Util/PriorityQueue.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Util/PriorityQueue.cs (original)
+++ incubator/lucene.net/trunk/src/core/Util/PriorityQueue.cs Mon Mar 12 22:29:26 2012
@@ -43,52 +43,50 @@ namespace Lucene.Net.Util
 		/// must define this one method. 
 		/// </summary>
 		public abstract bool LessThan(T a, T b);
-		
-		/// <summary> This method can be overridden by extending classes to return a sentinel
-		/// object which will be used by <see cref="Initialize(int)" /> to fill the queue, so
-		/// that the code which uses that queue can always assume it's full and only
-		/// change the top without attempting to insert any new object.<br/>
-		/// 
-		/// Those sentinel values should always compare worse than any non-sentinel
-		/// value (i.e., <see cref="LessThan" /> should always favor the
-		/// non-sentinel values).<br/>
-		/// 
-		/// By default, this method returns false, which means the queue will not be
-		/// filled with sentinel values. Otherwise, the value returned will be used to
-		/// pre-populate the queue. Adds sentinel values to the queue.<br/>
-		/// 
-		/// If this method is extended to return a non-null value, then the following
-		/// usage pattern is recommended:
-		/// 
-        /// <code>
-		/// // extends getSentinelObject() to return a non-null value.
-        /// PriorityQueue&lt;MyObject&gt; pq = new MyQueue&lt;MyObject&gt;(numHits);
-		/// // save the 'top' element, which is guaranteed to not be null.
-		/// MyObject pqTop = pq.top();
-		/// &lt;...&gt;
-		/// // now in order to add a new element, which is 'better' than top (after 
-		/// // you've verified it is better), it is as simple as:
-		/// pqTop.change().
-		/// pqTop = pq.updateTop();
-        /// </code>
-		/// 
-		/// <b>NOTE:</b> if this method returns a non-null value, it will be called by
-		/// <see cref="Initialize(int)" /> <see cref="Size()" /> times, relying on a new object to
-		/// be returned and will not check if it's null again. Therefore you should
-		/// ensure any call to this method creates a new instance and behaves
-		/// consistently, e.g., it cannot return null if it previously returned
-		/// non-null.
-		/// 
-		/// </summary>
-		/// <returns> the sentinel object to use to pre-populate the queue, or null if
-		/// sentinel objects are not supported.
-		/// </returns>
-		protected internal virtual T GetSentinelObject()
-		{
-			return default(T);
-		}
-		
-		/// <summary>Subclass constructors must call this. </summary>
+
+	    /// <summary> This method can be overridden by extending classes to return a sentinel
+	    /// object which will be used by <see cref="Initialize(int)" /> to fill the queue, so
+	    /// that the code which uses that queue can always assume it's full and only
+	    /// change the top without attempting to insert any new object.<br/>
+	    /// 
+	    /// Those sentinel values should always compare worse than any non-sentinel
+	    /// value (i.e., <see cref="LessThan" /> should always favor the
+	    /// non-sentinel values).<br/>
+	    /// 
+	    /// By default, this property returns null, which means the queue will not be
+	    /// filled with sentinel values. Otherwise, the value returned will be used to
+	    /// pre-populate the queue with sentinel values.<br/>
+	    /// 
+	    /// If this property is overridden to return a non-null value, then the following
+	    /// usage pattern is recommended:
+	    /// 
+	    /// <code>
+	    /// // MyQueue overrides SentinelObject to return a non-null value.
+	    /// PriorityQueue&lt;MyObject&gt; pq = new MyQueue&lt;MyObject&gt;(numHits);
+	    /// // save the 'top' element, which is guaranteed to not be null.
+	    /// MyObject pqTop = pq.Top();
+	    /// &lt;...&gt;
+	    /// // now in order to add a new element, which is 'better' than top (after 
+	    /// // you've verified it is better), it is as simple as:
+	    /// pqTop.Change();
+	    /// pqTop = pq.UpdateTop();
+	    /// </code>
+	    /// 
+	    /// <b>NOTE:</b> if this property returns a non-null value, it will be called
+	    /// <see cref="Size()" /> times by <see cref="Initialize(int)" />, relying on a new object to
+	    /// be returned on each call and never checking for null again. Therefore you should
+	    /// ensure that every call creates a new instance and that the property behaves
+	    /// consistently, e.g., it cannot return null if it previously returned
+	    /// non-null.
+	    /// 
+	    /// </summary>
+	    /// <value> the sentinel object to use to pre-populate the queue, or null if sentinel objects are not supported. </value>
+	    protected internal virtual T SentinelObject
+	    {
+	        get { return default(T); }
+	    }
+
+	    /// <summary>Subclass constructors must call this. </summary>
 		protected internal void  Initialize(int maxSize)
 		{
 			size = 0;
@@ -121,13 +119,13 @@ namespace Lucene.Net.Util
 			this.maxSize = maxSize;
 			
 			// If sentinel objects are supported, populate the queue with them
-			T sentinel = GetSentinelObject();
+			T sentinel = SentinelObject;
 			if (sentinel != null)
 			{
 				heap[1] = sentinel;
 				for (int i = 2; i < heap.Length; i++)
 				{
-					heap[i] = GetSentinelObject();
+					heap[i] = SentinelObject;
 				}
 				size = maxSize;
 			}
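
For readers following the sentinel pattern documented above, a minimal sketch of an
opting-in subclass may help. MyObject, MyQueue, the Score field, numHits, and
candidateScore are illustrative assumptions; only SentinelObject, LessThan,
Initialize, and the Top()/UpdateTop() members referenced in the doc comment come
from PriorityQueue itself:

    // Sketch only, not part of this commit.
    public class MyObject
    {
        // Compares worse than any real score, so sentinels always lose.
        public float Score = float.NegativeInfinity;
    }

    public class MyQueue : PriorityQueue<MyObject>
    {
        public MyQueue(int numHits)
        {
            Initialize(numHits); // subclass constructors must call this
        }

        public override bool LessThan(MyObject a, MyObject b)
        {
            return a.Score < b.Score;
        }

        // Called Size() times by Initialize; must return a fresh instance on
        // every call and never return null once it has returned non-null.
        // (Use 'protected override' instead when compiling outside the
        // Lucene.Net assembly.)
        protected internal override MyObject SentinelObject
        {
            get { return new MyObject(); }
        }
    }

With the queue pre-filled, the caller never inserts; it only overwrites the top and
re-heapifies:

    MyQueue pq = new MyQueue(numHits);
    MyObject pqTop = pq.Top();    // guaranteed non-null
    pqTop.Score = candidateScore; // after verifying the candidate beats top
    pqTop = pq.UpdateTop();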

Modified: incubator/lucene.net/trunk/src/core/Util/RamUsageEstimator.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Util/RamUsageEstimator.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Util/RamUsageEstimator.cs (original)
+++ incubator/lucene.net/trunk/src/core/Util/RamUsageEstimator.cs Mon Mar 12 22:29:26 2012
@@ -81,9 +81,9 @@ namespace Lucene.Net.Util
 			// Use Map rather than Set so that we can use an IdentityHashMap - not
 			// seeing an IdentityHashSet
             seen = new IdentityDictionary<object, object>(64);
-			this.refSize = memoryModel.GetReferenceSize();
-			this.arraySize = memoryModel.GetArraySize();
-			this.classSize = memoryModel.GetClassSize();
+			this.refSize = memoryModel.ReferenceSize;
+			this.arraySize = memoryModel.ArraySize;
+			this.classSize = memoryModel.ClassSize;
 		}
 		
 		public long EstimateRamUsage(System.Object obj)
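
The hunk above is representative of this commit as a whole: Java-style accessor
pairs from the original port are collapsed into idiomatic C# properties.
Schematically (refSize and maxTokenLength stand in for whatever backing field a
given class actually uses):

    // Before, as ported from Java:
    public int GetReferenceSize() { return refSize; }

    // After this commit, a read-only property:
    public int ReferenceSize
    {
        get { return refSize; }
    }

    // Getter/setter pairs become read-write properties, so call sites
    // change from sa.SetMaxTokenLength(5) to sa.MaxTokenLength = 5:
    public int MaxTokenLength
    {
        get { return maxTokenLength; }
        set { maxTokenLength = value; }
    }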

Modified: incubator/lucene.net/trunk/src/core/Util/ReaderUtil.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Util/ReaderUtil.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Util/ReaderUtil.cs (original)
+++ incubator/lucene.net/trunk/src/core/Util/ReaderUtil.cs Mon Mar 12 22:29:26 2012
@@ -31,7 +31,7 @@ namespace Lucene.Net.Util
 		/// <param name="reader"></param>
 		public static void GatherSubReaders(System.Collections.Generic.IList<IndexReader> allSubReaders, IndexReader reader)
 		{
-			IndexReader[] subReaders = reader.GetSequentialSubReaders();
+			IndexReader[] subReaders = reader.SequentialSubReaders;
 			if (subReaders == null)
 			{
 				// Add the reader itself, and do not recurse
@@ -65,7 +65,7 @@ namespace Lucene.Net.Util
 			for (int i = 0; i < subReaders.Length; i++)
 			{
 				docStarts[i] = maxDoc;
-				maxDoc += subReaders[i].MaxDoc();
+				maxDoc += subReaders[i].MaxDoc;
 			}
 			return subReaders[ReaderUtil.SubIndex(doc, docStarts)];
 		}
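
Here docStarts records each subreader's first global document number, and
ReaderUtil.SubIndex picks the subreader owning a given doc. A linear-scan
equivalent of that contract (the shipped method may well search more efficiently;
this only sketches the semantics):

    // Returns i such that docStarts[i] <= doc and either i is the last
    // entry or doc < docStarts[i + 1]. Assumes docStarts is ascending.
    static int SubIndexSketch(int doc, int[] docStarts)
    {
        int sub = 0;
        for (int i = 0; i < docStarts.Length; i++)
        {
            if (docStarts[i] <= doc)
                sub = i; // the last start at or below doc wins
        }
        return sub;
    }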

Modified: incubator/lucene.net/trunk/src/core/Util/SortedVIntList.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Util/SortedVIntList.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Util/SortedVIntList.cs (original)
+++ incubator/lucene.net/trunk/src/core/Util/SortedVIntList.cs Mon Mar 12 22:29:26 2012
@@ -260,28 +260,26 @@ namespace Lucene.Net.Util
 		private const int VB1 = 0x7F;
 		private const int BIT_SHIFT = 7;
 		private int MAX_BYTES_PER_INT = (31 / BIT_SHIFT) + 1;
-		
-		/// <returns>    The total number of sorted integers.
-		/// </returns>
-		public virtual int Size()
-		{
-			return size;
-		}
-		
-		/// <returns> The size of the byte array storing the compressed sorted integers.
-		/// </returns>
-		public virtual int GetByteSize()
-		{
-			return bytes.Length;
-		}
-		
-		/// <summary>This DocIdSet implementation is cacheable. </summary>
-		public override bool IsCacheable()
-		{
-			return true;
-		}
-		
-		/// <returns>    An iterator over the sorted integers.
+
+	    /// <value> The total number of sorted integers. </value>
+	    public virtual int Size
+	    {
+	        get { return size; }
+	    }
+
+	    /// <value> The size of the byte array storing the compressed sorted integers. </value>
+	    public virtual int ByteSize
+	    {
+	        get { return bytes.Length; }
+	    }
+
+	    /// <summary>This DocIdSet implementation is cacheable. </summary>
+	    public override bool IsCacheable
+	    {
+	        get { return true; }
+	    }
+
+	    /// <returns>    An iterator over the sorted integers.
 		/// </returns>
 		public override DocIdSetIterator Iterator()
 		{
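
The constants visible in this hunk (VB1 = 0x7F, BIT_SHIFT = 7, and
MAX_BYTES_PER_INT = (31 / 7) + 1 = 5) imply the classic variable-byte scheme:
seven payload bits per byte, with the high bit flagging a continuation byte. A
sketch of the encoder side, assumed for illustration rather than copied from
SortedVIntList:

    // Writes a non-negative value as 1 to 5 bytes;
    // returns the next free offset in bytes.
    static int WriteVInt(byte[] bytes, int offset, int value)
    {
        // emit 7 bits at a time, low bits first; the 0x80 bit
        // marks every byte except the last
        while ((value & ~0x7F) != 0)
        {
            bytes[offset++] = (byte)((value & 0x7F) | 0x80);
            value = (int)((uint)value >> 7); // logical shift
        }
        bytes[offset++] = (byte)value;
        return offset;
    }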

Modified: incubator/lucene.net/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs Mon Mar 12 22:29:26 2012
@@ -118,7 +118,7 @@ namespace Lucene.Net.Analysis
                 termAtt.SetTermBuffer("bogusTerm");
                 if (offsetAtt != null) offsetAtt.SetOffset(14584724, 24683243);
                 if (typeAtt != null) typeAtt.SetType("bogusType");
-                if (posIncrAtt != null) posIncrAtt.SetPositionIncrement(45987657);
+                if (posIncrAtt != null) posIncrAtt.PositionIncrement = 45987657;
 
                 checkClearAtt.GetAndResetClearCalled(); // reset it, because we called clearAttribute() before
                 Assert.IsTrue(ts.IncrementToken(), "token " + i + " does not exist");
@@ -132,7 +132,7 @@ namespace Lucene.Net.Analysis
                 if (types != null)
                     Assert.AreEqual(types[i], typeAtt.Type(), "type " + i);
                 if (posIncrements != null)
-                    Assert.AreEqual(posIncrements[i], posIncrAtt.GetPositionIncrement(), "posIncrement " + i);
+                    Assert.AreEqual(posIncrements[i], posIncrAtt.PositionIncrement, "posIncrement " + i);
             }
             Assert.IsFalse(ts.IncrementToken(), "end of stream");
             ts.End();
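
For context, these tests drive the attribute-based TokenStream API: each attribute
is fetched once, then refreshed in place by every IncrementToken() call. A
bare-bones consumer, with analyzer, field name, and text as placeholders:

    TokenStream ts = analyzer.TokenStream("field", new System.IO.StringReader(text));
    TermAttribute termAtt = ts.GetAttribute<TermAttribute>();
    PositionIncrementAttribute posIncrAtt = ts.GetAttribute<PositionIncrementAttribute>();
    int position = 0;
    while (ts.IncrementToken())
    {
        position += posIncrAtt.PositionIncrement; // a property after this commit
        System.Console.WriteLine(position + ": " + termAtt.Term());
    }
    ts.End();
    ts.Close();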

Modified: incubator/lucene.net/trunk/test/core/Analysis/TestAnalyzers.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/TestAnalyzers.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/TestAnalyzers.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/TestAnalyzers.cs Mon Mar 12 22:29:26 2012
@@ -82,7 +82,7 @@ namespace Lucene.Net.Analysis
 					break;
 				// System.out.println("id="+System.identityHashCode(nextToken) + " " + t);
 				// System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]);
-				Assert.AreEqual(b, payloadAtt.GetPayload().ToByteArray()[0]);
+				Assert.AreEqual(b, payloadAtt.Payload.ToByteArray()[0]);
 			}
 		}
 		
@@ -181,7 +181,7 @@ namespace Lucene.Net.Analysis
 			bool hasNext = input.IncrementToken();
 			if (!hasNext)
 				return false;
-			payloadAtt.SetPayload(p); // reuse the payload / byte[]
+			payloadAtt.Payload = p; // reuse the payload / byte[]
 			data[0]++;
 			return true;
 		}

Modified: incubator/lucene.net/trunk/test/core/Analysis/TestStandardAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/TestStandardAnalyzer.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/TestStandardAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/TestStandardAnalyzer.cs Mon Mar 12 22:29:26 2012
@@ -39,7 +39,7 @@ namespace Lucene.Net.Analysis
 		public virtual void  TestMaxTermLength()
 		{
             StandardAnalyzer sa = new StandardAnalyzer(Version.LUCENE_CURRENT);
-			sa.SetMaxTokenLength(5);
+			sa.MaxTokenLength = 5;
 			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "xy", "z"});
 		}
 		
@@ -48,7 +48,7 @@ namespace Lucene.Net.Analysis
 		{
             StandardAnalyzer sa = new StandardAnalyzer(Version.LUCENE_CURRENT);
 			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "toolong", "xy", "z"});
-			sa.SetMaxTokenLength(5);
+			sa.MaxTokenLength = 5;
 			
 			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "xy", "z"}, new int[]{1, 1, 2, 1});
 		}

Modified: incubator/lucene.net/trunk/test/core/Analysis/TestStopAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/TestStopAnalyzer.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/TestStopAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/TestStopAnalyzer.cs Mon Mar 12 22:29:26 2012
@@ -81,7 +81,7 @@ namespace Lucene.Net.Analysis
 			{
 				System.String text = termAtt.Term();
 				Assert.IsFalse(stopWordsSet.Contains(text));
-                Assert.AreEqual(1, posIncrAtt.GetPositionIncrement()); // in 2.4 stop tokenizer does not apply increments.
+                Assert.AreEqual(1, posIncrAtt.PositionIncrement); // in 2.4 stop tokenizer does not apply increments.
 			}
 		}
 		
@@ -105,7 +105,7 @@ namespace Lucene.Net.Analysis
             {
                 string text = termAtt.Term();
                 Assert.IsFalse(stopWordsSet.Contains(text));
-                Assert.AreEqual(expectedIncr[i++], posIncrAtt.GetPositionIncrement());
+                Assert.AreEqual(expectedIncr[i++], posIncrAtt.PositionIncrement);
             }
         }
 	}

Modified: incubator/lucene.net/trunk/test/core/Analysis/TestStopFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/TestStopFilter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/TestStopFilter.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/TestStopFilter.cs Mon Mar 12 22:29:26 2012
@@ -126,7 +126,7 @@ namespace Lucene.Net.Analysis
 			var stopSet1 = StopFilter.MakeStopSet(stopWords1);
 			reader = new System.IO.StringReader(sb.ToString());
 			StopFilter stpf0 = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet0); // first part of the set
-			stpf0.SetEnablePositionIncrements(true);
+			stpf0.EnablePositionIncrements = true;
 			StopFilter stpf01 = new StopFilter(false, stpf0, stopSet1); // two stop filters concatenated!
 			DoTestStopPositons(stpf01, true);
 		}
@@ -134,7 +134,7 @@ namespace Lucene.Net.Analysis
 		private void  DoTestStopPositons(StopFilter stpf, bool enableIcrements)
 		{
 			Log("---> test with enable-increments-" + (enableIcrements?"enabled":"disabled"));
-			stpf.SetEnablePositionIncrements(enableIcrements);
+			stpf.EnablePositionIncrements = enableIcrements;
             TermAttribute termAtt = stpf.GetAttribute<TermAttribute>();
             PositionIncrementAttribute posIncrAtt = stpf.GetAttribute<PositionIncrementAttribute>();
 			for (int i = 0; i < 20; i += 3)
@@ -143,7 +143,7 @@ namespace Lucene.Net.Analysis
 				Log("Token " + i + ": " + stpf);
 				System.String w = English.IntToEnglish(i).Trim();
 				Assert.AreEqual(w, termAtt.Term(), "expecting token " + i + " to be " + w);
-				Assert.AreEqual(enableIcrements?(i == 0?1:3):1, posIncrAtt.GetPositionIncrement(), "all but first token must have position increment of 3");
+				Assert.AreEqual(enableIcrements?(i == 0?1:3):1, posIncrAtt.PositionIncrement, "all but first token must have position increment of 3");
 			}
 			Assert.IsFalse(stpf.IncrementToken());
 		}

Modified: incubator/lucene.net/trunk/test/core/Analysis/TestTeeSinkTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/TestTeeSinkTokenFilter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/TestTeeSinkTokenFilter.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/TestTeeSinkTokenFilter.cs Mon Mar 12 22:29:26 2012
@@ -176,13 +176,13 @@ namespace Lucene.Net.Analysis
                         PositionIncrementAttribute posIncrAtt = stream.GetAttribute<PositionIncrementAttribute>();
                         while (stream.IncrementToken())
                         {
-                            tfPos += posIncrAtt.GetPositionIncrement();
+                            tfPos += posIncrAtt.PositionIncrement;
                         }
                         stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
                         posIncrAtt = stream.GetAttribute<PositionIncrementAttribute>();
                         while (stream.IncrementToken())
                         {
-                            tfPos += posIncrAtt.GetPositionIncrement();
+                            tfPos += posIncrAtt.PositionIncrement;
                         }
                     }
                     long finish = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
@@ -197,13 +197,13 @@ namespace Lucene.Net.Analysis
                         PositionIncrementAttribute posIncrAtt = teeStream.GetAttribute<PositionIncrementAttribute>();
                         while (teeStream.IncrementToken())
                         {
-                            sinkPos += posIncrAtt.GetPositionIncrement();
+                            sinkPos += posIncrAtt.PositionIncrement;
                         }
                         //System.out.println("Modulo--------");
                         posIncrAtt = sink.GetAttribute<PositionIncrementAttribute>();
                         while (sink.IncrementToken())
                         {
-                            sinkPos += posIncrAtt.GetPositionIncrement();
+                            sinkPos += posIncrAtt.PositionIncrement;
                         }
                     }
                     finish = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);

Modified: incubator/lucene.net/trunk/test/core/Analysis/TestToken.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/TestToken.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/TestToken.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/TestToken.cs Mon Mar 12 22:29:26 2012
@@ -50,26 +50,26 @@ namespace Lucene.Net.Analysis
 			Assert.AreNotEqual(t.TermBuffer(), content);
 			Assert.AreEqual("hello", t.Term());
 			Assert.AreEqual("word", t.Type());
-			Assert.AreEqual(0, t.GetFlags());
+			Assert.AreEqual(0, t.Flags);
 			
 			t = new Token(6, 22);
 			t.SetTermBuffer(content, 0, content.Length);
 			Assert.AreEqual("hello", t.Term());
 			Assert.AreEqual("(hello,6,22)", t.ToString());
 			Assert.AreEqual("word", t.Type());
-			Assert.AreEqual(0, t.GetFlags());
+			Assert.AreEqual(0, t.Flags);
 			
 			t = new Token(6, 22, 7);
 			t.SetTermBuffer(content, 0, content.Length);
 			Assert.AreEqual("hello", t.Term());
 			Assert.AreEqual("(hello,6,22)", t.ToString());
-			Assert.AreEqual(7, t.GetFlags());
+			Assert.AreEqual(7, t.Flags);
 			
 			t = new Token(6, 22, "junk");
 			t.SetTermBuffer(content, 0, content.Length);
 			Assert.AreEqual("hello", t.Term());
 			Assert.AreEqual("(hello,6,22,type=junk)", t.ToString());
-			Assert.AreEqual(0, t.GetFlags());
+			Assert.AreEqual(0, t.Flags);
 		}
 		
         [Test]
@@ -217,10 +217,10 @@ namespace Lucene.Net.Analysis
             Assert.AreNotSame(buf, copy.TermBuffer());
 			
 			Payload pl = new Payload(new byte[]{1, 2, 3, 4});
-			t.SetPayload(pl);
+			t.Payload = pl;
 			copy = (Token) TestSimpleAttributeImpls.AssertCloneIsEqual(t);
-			Assert.AreEqual(pl, copy.GetPayload());
-			Assert.AreNotSame(pl, copy.GetPayload());
+			Assert.AreEqual(pl, copy.Payload);
+			Assert.AreNotSame(pl, copy.Payload);
 		}
 		
         [Test]
@@ -240,10 +240,10 @@ namespace Lucene.Net.Analysis
 			Assert.AreNotSame(buf, copy.TermBuffer());
 			
 			Payload pl = new Payload(new byte[]{1, 2, 3, 4});
-			t.SetPayload(pl);
+			t.Payload = pl;
 			copy = (Token) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
-			Assert.AreEqual(pl, copy.GetPayload());
-            Assert.AreNotSame(pl, copy.GetPayload());
+			Assert.AreEqual(pl, copy.Payload);
+            Assert.AreNotSame(pl, copy.Payload);
 		}
 
         public interface SenselessAttribute : Attribute {}

Modified: incubator/lucene.net/trunk/test/core/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs (original)
+++ incubator/lucene.net/trunk/test/core/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs Mon Mar 12 22:29:26 2012
@@ -38,38 +38,38 @@ namespace Lucene.Net.Analysis.Tokenattri
 		public virtual void  TestFlagsAttribute()
 		{
 			FlagsAttributeImpl att = new FlagsAttributeImpl();
-			Assert.AreEqual(0, att.GetFlags());
+			Assert.AreEqual(0, att.Flags);
 			
-			att.SetFlags(1234);
+			att.Flags = 1234;
 			Assert.AreEqual("flags=1234", att.ToString());
 			
 			FlagsAttributeImpl att2 = (FlagsAttributeImpl) AssertCloneIsEqual(att);
-			Assert.AreEqual(1234, att2.GetFlags());
+			Assert.AreEqual(1234, att2.Flags);
 			
 			att2 = (FlagsAttributeImpl) AssertCopyIsEqual(att);
-			Assert.AreEqual(1234, att2.GetFlags());
+			Assert.AreEqual(1234, att2.Flags);
 			
 			att.Clear();
-			Assert.AreEqual(0, att.GetFlags());
+			Assert.AreEqual(0, att.Flags);
 		}
 		
         [Test]
 		public virtual void  TestPositionIncrementAttribute()
 		{
 			PositionIncrementAttributeImpl att = new PositionIncrementAttributeImpl();
-			Assert.AreEqual(1, att.GetPositionIncrement());
+			Assert.AreEqual(1, att.PositionIncrement);
 			
-			att.SetPositionIncrement(1234);
+			att.PositionIncrement = 1234;
 			Assert.AreEqual("positionIncrement=1234", att.ToString());
 			
 			PositionIncrementAttributeImpl att2 = (PositionIncrementAttributeImpl) AssertCloneIsEqual(att);
-			Assert.AreEqual(1234, att2.GetPositionIncrement());
+			Assert.AreEqual(1234, att2.PositionIncrement);
 			
 			att2 = (PositionIncrementAttributeImpl) AssertCopyIsEqual(att);
-			Assert.AreEqual(1234, att2.GetPositionIncrement());
+			Assert.AreEqual(1234, att2.PositionIncrement);
 			
 			att.Clear();
-			Assert.AreEqual(1, att.GetPositionIncrement());
+			Assert.AreEqual(1, att.PositionIncrement);
 		}
 		
         [Test]
@@ -95,21 +95,21 @@ namespace Lucene.Net.Analysis.Tokenattri
 		public virtual void  TestPayloadAttribute()
 		{
 			PayloadAttributeImpl att = new PayloadAttributeImpl();
-			Assert.IsNull(att.GetPayload());
+			Assert.IsNull(att.Payload);
 			
 			Payload pl = new Payload(new byte[]{1, 2, 3, 4});
-			att.SetPayload(pl);
+			att.Payload = pl;
 			
 			PayloadAttributeImpl att2 = (PayloadAttributeImpl) AssertCloneIsEqual(att);
-			Assert.AreEqual(pl, att2.GetPayload());
-			Assert.AreNotSame(pl, att2.GetPayload());
+			Assert.AreEqual(pl, att2.Payload);
+			Assert.AreNotSame(pl, att2.Payload);
 			
 			att2 = (PayloadAttributeImpl) AssertCopyIsEqual(att);
-			Assert.AreEqual(pl, att2.GetPayload());
-            Assert.AreNotSame(pl, att2.GetPayload());
+			Assert.AreEqual(pl, att2.Payload);
+            Assert.AreNotSame(pl, att2.Payload);
 			
 			att.Clear();
-			Assert.IsNull(att.GetPayload());
+			Assert.IsNull(att.Payload);
 		}
 		
         [Test]

Modified: incubator/lucene.net/trunk/test/core/Document/TestBinaryDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Document/TestBinaryDocument.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Document/TestBinaryDocument.cs (original)
+++ incubator/lucene.net/trunk/test/core/Document/TestBinaryDocument.cs Mon Mar 12 22:29:26 2012
@@ -39,8 +39,8 @@ namespace Lucene.Net.Documents
         [Test]
 		public virtual void  TestBinaryFieldInIndex()
 		{
-			Fieldable binaryFldStored = new Field("binaryStored", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
-			Fieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);
+			IFieldable binaryFldStored = new Field("binaryStored", System.Text.UTF8Encoding.UTF8.GetBytes(binaryValStored), Field.Store.YES);
+			IFieldable stringFldStored = new Field("stringStored", binaryValStored, Field.Store.YES, Field.Index.NO, Field.TermVector.NO);
 			
 			try
 			{
@@ -83,7 +83,7 @@ namespace Lucene.Net.Documents
 			
 			/** delete the document from index */
 			reader.DeleteDocument(0);
-			Assert.AreEqual(0, reader.NumDocs());
+			Assert.AreEqual(0, reader.NumDocs);
 			
 			reader.Close();
 			dir.Close();
@@ -92,8 +92,8 @@ namespace Lucene.Net.Documents
         [Test]
 		public virtual void  TestCompressionTools()
 		{
-			Fieldable binaryFldCompressed = new Field("binaryCompressed", CompressionTools.Compress(System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed)), Field.Store.YES);
-			Fieldable stringFldCompressed = new Field("stringCompressed", CompressionTools.CompressString(binaryValCompressed), Field.Store.YES);
+			IFieldable binaryFldCompressed = new Field("binaryCompressed", CompressionTools.Compress(System.Text.UTF8Encoding.UTF8.GetBytes(binaryValCompressed)), Field.Store.YES);
+			IFieldable stringFldCompressed = new Field("stringCompressed", CompressionTools.CompressString(binaryValCompressed), Field.Store.YES);
 			
 			Document doc = new Document();
 			

Modified: incubator/lucene.net/trunk/test/core/Document/TestDocument.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Document/TestDocument.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Document/TestDocument.cs (original)
+++ incubator/lucene.net/trunk/test/core/Document/TestDocument.cs Mon Mar 12 22:29:26 2012
@@ -45,19 +45,19 @@ namespace Lucene.Net.Documents
 		public virtual void  TestBinaryField()
 		{
 			Document doc = new Document();
-			Fieldable stringFld = new Field("string", binaryVal, Field.Store.YES, Field.Index.NO);
-			Fieldable binaryFld = new Field("binary", System.Text.UTF8Encoding.UTF8.GetBytes(binaryVal), Field.Store.YES);
-			Fieldable binaryFld2 = new Field("binary", System.Text.UTF8Encoding.UTF8.GetBytes(binaryVal2), Field.Store.YES);
+			IFieldable stringFld = new Field("string", binaryVal, Field.Store.YES, Field.Index.NO);
+			IFieldable binaryFld = new Field("binary", System.Text.UTF8Encoding.UTF8.GetBytes(binaryVal), Field.Store.YES);
+			IFieldable binaryFld2 = new Field("binary", System.Text.UTF8Encoding.UTF8.GetBytes(binaryVal2), Field.Store.YES);
 			
 			doc.Add(stringFld);
 			doc.Add(binaryFld);
 			
 			Assert.AreEqual(2, doc.fields_ForNUnit.Count);
 			
-			Assert.IsTrue(binaryFld.IsBinary());
-			Assert.IsTrue(binaryFld.IsStored());
-			Assert.IsFalse(binaryFld.IsIndexed());
-			Assert.IsFalse(binaryFld.IsTokenized());
+			Assert.IsTrue(binaryFld.IsBinary);
+			Assert.IsTrue(binaryFld.IsStored);
+			Assert.IsFalse(binaryFld.IsIndexed);
+			Assert.IsFalse(binaryFld.IsTokenized);
 			
 			System.String binaryTest = new System.String(System.Text.UTF8Encoding.UTF8.GetChars(doc.GetBinaryValue("binary")));
 			Assert.IsTrue(binaryTest.Equals(binaryVal));
@@ -178,7 +178,7 @@ namespace Lucene.Net.Documents
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			
-			DoAssert(searcher.Doc(hits[0].doc), true);
+			DoAssert(searcher.Doc(hits[0].Doc), true);
 			searcher.Close();
 		}
 		
@@ -256,13 +256,13 @@ namespace Lucene.Net.Documents
 			int result = 0;
 			for (int i = 0; i < 3; i++)
 			{
-				Document doc2 = searcher.Doc(hits[i].doc);
+				Document doc2 = searcher.Doc(hits[i].Doc);
 				Field f = doc2.GetField("id");
-				if (f.StringValue().Equals("id1"))
+				if (f.StringValue.Equals("id1"))
 					result |= 1;
-				else if (f.StringValue().Equals("id2"))
+				else if (f.StringValue.Equals("id2"))
 					result |= 2;
-				else if (f.StringValue().Equals("id3"))
+				else if (f.StringValue.Equals("id3"))
 					result |= 4;
 				else
 					Assert.Fail("unexpected id field");

Modified: incubator/lucene.net/trunk/test/core/Index/DocHelper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/DocHelper.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/DocHelper.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/DocHelper.cs Mon Mar 12 22:29:26 2012
@@ -16,12 +16,11 @@
  */
 
 using System;
-
+using Lucene.Net.Documents;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Fieldable = Lucene.Net.Documents.Fieldable;
 using Directory = Lucene.Net.Store.Directory;
 using Similarity = Lucene.Net.Search.Similarity;
 
@@ -115,9 +114,9 @@ namespace Lucene.Net.Index
 		public static System.Collections.IDictionary noTf = new System.Collections.Hashtable();
 		
 		
-		private static void  Add(System.Collections.IDictionary map, Fieldable field)
+		private static void  Add(System.Collections.IDictionary map, IFieldable field)
 		{
-			map[field.Name()] = field;
+			map[field.Name] = field;
 		}
 		
 		/// <summary> Adds the fields above to a document </summary>
@@ -142,7 +141,7 @@ namespace Lucene.Net.Index
 		/// <throws>  IOException </throws>
 		public static SegmentInfo WriteDoc(Directory dir, Document doc)
 		{
-			return WriteDoc(dir, new WhitespaceAnalyzer(), Similarity.GetDefault(), doc);
+			return WriteDoc(dir, new WhitespaceAnalyzer(), Similarity.Default, doc);
 		}
 		
 		/// <summary> Writes the document to the directory using the analyzer
@@ -180,13 +179,13 @@ namespace Lucene.Net.Index
 			textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
 			textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, Field.Store.YES, Field.Index.ANALYZED);
 			{
-				textField3.SetOmitNorms(true);
+				textField3.OmitNorms = true;
 			}
 			keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, Field.Store.YES, Field.Index.NOT_ANALYZED);
 			noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS);
 			noTFField = new Field(NO_TF_KEY, NO_TF_TEXT, Field.Store.YES, Field.Index.ANALYZED);
 			{
-				noTFField.SetOmitTermFreqAndPositions(true);
+				noTFField.OmitTermFreqAndPositions = true;
 			}
 			unIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, Field.Store.YES, Field.Index.NO);
 			unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO);
@@ -218,25 +217,25 @@ namespace Lucene.Net.Index
 				fields[fields.Length - 1] = largeLazyField;
 				for (int i = 0; i < fields.Length; i++)
 				{
-					Fieldable f = fields[i];
+					IFieldable f = fields[i];
 					Add(all, f);
-					if (f.IsIndexed())
+					if (f.IsIndexed)
 						Add(indexed, f);
 					else
 						Add(unindexed, f);
-					if (f.IsTermVectorStored())
+					if (f.IsTermVectorStored)
 						Add(termvector, f);
-					if (f.IsIndexed() && !f.IsTermVectorStored())
+					if (f.IsIndexed && !f.IsTermVectorStored)
 						Add(notermvector, f);
-					if (f.IsStored())
+					if (f.IsStored)
 						Add(stored, f);
 					else
 						Add(unstored, f);
-					if (f.GetOmitNorms())
+					if (f.OmitNorms)
 						Add(noNorms, f);
-					if (f.GetOmitTermFreqAndPositions())
+					if (f.OmitTermFreqAndPositions)
 						Add(noTf, f);
-					if (f.IsLazy())
+					if (f.IsLazy)
 						Add(lazy, f);
 				}
 			}

Modified: incubator/lucene.net/trunk/test/core/Index/TestAddIndexesNoOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestAddIndexesNoOptimize.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestAddIndexesNoOptimize.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestAddIndexesNoOptimize.cs Mon Mar 12 22:29:26 2012
@@ -52,7 +52,7 @@ namespace Lucene.Net.Index
 			writer.Close();
 			
 			writer = NewWriter(aux, true);
-			writer.SetUseCompoundFile(false); // use one without a compound file
+			writer.UseCompoundFile = false; // use one without a compound file
 			// add 40 documents in separate files
 			AddDocs(writer, 40);
             Assert.AreEqual(40, writer.MaxDoc());
@@ -267,13 +267,13 @@ namespace Lucene.Net.Index
 			writer.Close();
 			
 			writer = NewWriter(aux, true);
-			writer.SetUseCompoundFile(false); // use one without a compound file
+			writer.UseCompoundFile = false; // use one without a compound file
 			writer.SetMaxBufferedDocs(1000);
 			// add 140 documents in separate files
 			AddDocs(writer, 40);
 			writer.Close();
 			writer = NewWriter(aux, true);
-			writer.SetUseCompoundFile(false); // use one without a compound file
+			writer.UseCompoundFile = false; // use one without a compound file
 			writer.SetMaxBufferedDocs(1000);
 			AddDocs(writer, 100);
 			writer.Close();
@@ -389,7 +389,7 @@ namespace Lucene.Net.Index
 			{
 				reader.DeleteDocument(i);
 			}
-			Assert.AreEqual(10, reader.NumDocs());
+			Assert.AreEqual(10, reader.NumDocs);
 			reader.Close();
 			
 			IndexWriter writer = NewWriter(dir, false);
@@ -430,7 +430,7 @@ namespace Lucene.Net.Index
 			{
 				reader.DeleteDocument(i);
 			}
-			Assert.AreEqual(3, reader.NumDocs());
+			Assert.AreEqual(3, reader.NumDocs);
 			reader.Close();
 			
 			reader = IndexReader.Open(aux2, false);
@@ -438,7 +438,7 @@ namespace Lucene.Net.Index
 			{
 				reader.DeleteDocument(i);
 			}
-			Assert.AreEqual(22, reader.NumDocs());
+			Assert.AreEqual(22, reader.NumDocs);
 			reader.Close();
 			
 			writer = NewWriter(dir, false);
@@ -484,8 +484,8 @@ namespace Lucene.Net.Index
 		private void  VerifyNumDocs(Directory dir, int numDocs)
 		{
 			IndexReader reader = IndexReader.Open(dir, true);
-			Assert.AreEqual(numDocs, reader.MaxDoc());
-			Assert.AreEqual(numDocs, reader.NumDocs());
+			Assert.AreEqual(numDocs, reader.MaxDoc);
+			Assert.AreEqual(numDocs, reader.NumDocs);
 			reader.Close();
 		}
 		
@@ -513,7 +513,7 @@ namespace Lucene.Net.Index
 			writer.Close();
 			
 			writer = NewWriter(aux, true);
-			writer.SetUseCompoundFile(false); // use one without a compound file
+			writer.UseCompoundFile = false; // use one without a compound file
 			writer.SetMaxBufferedDocs(100);
 			writer.SetMergeFactor(10);
 			// add 30 documents in 3 segments
@@ -522,7 +522,7 @@ namespace Lucene.Net.Index
 				AddDocs(writer, 10);
 				writer.Close();
 				writer = NewWriter(aux, false);
-				writer.SetUseCompoundFile(false); // use one without a compound file
+				writer.UseCompoundFile = false; // use one without a compound file
 				writer.SetMaxBufferedDocs(100);
 				writer.SetMergeFactor(10);
 			}
@@ -540,7 +540,7 @@ namespace Lucene.Net.Index
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetMergePolicy(new LogByteSizeMergePolicy(writer));
 			writer.SetMaxBufferedDocs(5);
-			writer.SetUseCompoundFile(false);
+			writer.UseCompoundFile = false;
 			writer.SetMergeFactor(100);
 			
 			Document doc = new Document();
@@ -560,10 +560,10 @@ namespace Lucene.Net.Index
 			Directory dir2 = new MockRAMDirectory();
 			writer = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
-			lmp.SetMinMergeMB(0.0001);
+			lmp.MinMergeMB = 0.0001;
 			writer.SetMergePolicy(lmp);
 			writer.SetMergeFactor(4);
-			writer.SetUseCompoundFile(false);
+			writer.UseCompoundFile = false;
 			writer.SetMergeScheduler(new SerialMergeScheduler());
 			writer.AddIndexesNoOptimize(new Directory[]{dir});
 			writer.Close();
@@ -578,15 +578,15 @@ namespace Lucene.Net.Index
 		{
 			Directory dir = new RAMDirectory();
 			IndexWriter writer = NewWriter(dir, true);
-			writer.SetUseCompoundFile(false);
+			writer.UseCompoundFile = false;
 			AddDocs(writer, 1);
 			writer.Close();
 			
 			Directory other = new RAMDirectory();
 			writer = NewWriter(other, true);
-			writer.SetUseCompoundFile(true);
+			writer.UseCompoundFile = true;
 			writer.AddIndexesNoOptimize(new Directory[]{dir});
-			Assert.IsTrue(writer.NewestSegment().GetUseCompoundFile());
+			Assert.IsTrue(writer.NewestSegment().UseCompoundFile);
 			writer.Close();
 		}
 	}

Modified: incubator/lucene.net/trunk/test/core/Index/TestAtomicUpdate.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestAtomicUpdate.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestAtomicUpdate.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestAtomicUpdate.cs Mon Mar 12 22:29:26 2012
@@ -149,7 +149,7 @@ namespace Lucene.Net.Index
 			public override void  DoWork()
 			{
 				IndexReader r = IndexReader.Open(directory, true);
-				Assert.AreEqual(100, r.NumDocs());
+				Assert.AreEqual(100, r.NumDocs);
 				r.Close();
 			}
 		}
@@ -182,7 +182,7 @@ namespace Lucene.Net.Index
 			writer.Commit();
 			
 			IndexReader r = IndexReader.Open(directory, true);
-			Assert.AreEqual(100, r.NumDocs());
+			Assert.AreEqual(100, r.NumDocs);
 			r.Close();
 			
 			IndexerThread indexerThread = new IndexerThread(writer, threads);

Modified: incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs Mon Mar 12 22:29:26 2012
@@ -158,31 +158,31 @@ namespace Lucene.Net.Index
                 }
 
                 // test that decompression works correctly
-                for (int i = 0; i < reader.MaxDoc(); i++)
+                for (int i = 0; i < reader.MaxDoc; i++)
                 {
                     if (!reader.IsDeleted(i))
                     {
                         Document d = reader.Document(i);
                         if (d.Get("content3") != null) continue;
                         count++;
-                        Fieldable compressed = d.GetFieldable("compressed");
+                        IFieldable compressed = d.GetFieldable("compressed");
                         if (int.Parse(d.Get("id"))%2 == 0)
                         {
-                            Assert.IsFalse(compressed.IsBinary());
-                            Assert.AreEqual(TEXT_TO_COMPRESS, compressed.StringValue(),
+                            Assert.IsFalse(compressed.IsBinary);
+                            Assert.AreEqual(TEXT_TO_COMPRESS, compressed.StringValue,
                                             "incorrectly decompressed string");
                         }
                         else
                         {
-                            Assert.IsTrue(compressed.IsBinary());
-                            Assert.IsTrue(BINARY_TO_COMPRESS.SequenceEqual(compressed.GetBinaryValue()),
+                            Assert.IsTrue(compressed.IsBinary);
+                            Assert.IsTrue(BINARY_TO_COMPRESS.SequenceEqual(compressed.BinaryValue),
                                           "incorrectly decompressed binary");
                         }
                     }
                 }
 
                 //check if field was decompressed after optimize
-                for (int i = 0; i < reader.MaxDoc(); i++)
+                for (int i = 0; i < reader.MaxDoc; i++)
                 {
                     if (!reader.IsDeleted(i))
                     {
@@ -191,7 +191,7 @@ namespace Lucene.Net.Index
                         count++;
                         // read the size from the binary value using BinaryReader (this prevents us from doing the shift ops ourselves):
                         // ugh, Java uses Big-Endian streams, so we need to do it manually.
-                        byte[] encodedSize = d.GetFieldable("compressed").GetBinaryValue().Take(4).Reverse().ToArray();
+                        byte[] encodedSize = d.GetFieldable("compressed").BinaryValue.Take(4).Reverse().ToArray();
                         int actualSize = BitConverter.ToInt32(encodedSize, 0);
                         int compressedSize = int.Parse(d.Get("compressedSize"));
                         bool binary = int.Parse(d.Get("id"))%2 > 0;
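
The Take(4).Reverse() step above exists because Java's stream wrote the four
length bytes big-endian, while BitConverter on little-endian .NET platforms reads
them little-endian; reversing the bytes bridges the two. A self-contained
illustration with a made-up length of 300:

    // 300 (0x0000012C) as written by a big-endian Java stream:
    byte[] fromJava = new byte[] { 0x00, 0x00, 0x01, 0x2C };
    byte[] reversed = fromJava.Reverse().ToArray(); // needs System.Linq
    int actualSize = System.BitConverter.ToInt32(reversed, 0); // == 300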
@@ -291,8 +291,8 @@ namespace Lucene.Net.Index
 			Assert.AreEqual(expectedCount, hitCount, "wrong number of hits");
 			for (int i = 0; i < hitCount; i++)
 			{
-				reader.Document(hits[i].doc);
-				reader.GetTermFreqVectors(hits[i].doc);
+				reader.Document(hits[i].Doc);
+				reader.GetTermFreqVectors(hits[i].Doc);
 			}
 		}
 		
@@ -305,7 +305,7 @@ namespace Lucene.Net.Index
 			
 			Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
 			IndexSearcher searcher = new IndexSearcher(dir, true);
-			IndexReader reader = searcher.GetIndexReader();
+			IndexReader reader = searcher.IndexReader;
 			
 			_TestUtil.CheckIndex(dir);
 			
@@ -322,19 +322,19 @@ namespace Lucene.Net.Index
 						    int numFields = oldName.StartsWith("29.") ? 7 : 5;
 							Assert.AreEqual(numFields, fields.Count);
 							Field f = d.GetField("id");
-							Assert.AreEqual("" + i, f.StringValue());
+							Assert.AreEqual("" + i, f.StringValue);
 							
 							f = (Field) d.GetField("utf8");
-							Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
+							Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue);
 							
 							f = (Field) d.GetField("autf8");
-							Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue());
+							Assert.AreEqual("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.StringValue);
 							
 							f = (Field) d.GetField("content2");
-							Assert.AreEqual("here is more content with aaa aaa aaa", f.StringValue());
+							Assert.AreEqual("here is more content with aaa aaa aaa", f.StringValue);
 							
 							f = (Field) d.GetField("fie\u2C77ld");
-							Assert.AreEqual("field with non-ascii name", f.StringValue());
+							Assert.AreEqual("field with non-ascii name", f.StringValue);
 						}
 					}
 				}
@@ -347,10 +347,10 @@ namespace Lucene.Net.Index
 			
 			// First document should be #21 since its norm was
 			// increased:
-			Document d2 = searcher.Doc(hits[0].doc);
+			Document d2 = searcher.Doc(hits[0].Doc);
 			Assert.AreEqual("21", d2.Get("id"), "didn't get the right document first");
 			
-			TestHits(hits, 34, searcher.GetIndexReader());
+			TestHits(hits, 34, searcher.IndexReader);
 			
 			if (!oldName.StartsWith("19.") && !oldName.StartsWith("20.") && !oldName.StartsWith("21.") && !oldName.StartsWith("22."))
 			{
@@ -408,9 +408,9 @@ namespace Lucene.Net.Index
 			// make sure searching sees right # hits
 			IndexSearcher searcher = new IndexSearcher(dir, true);
 			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
-			Document d = searcher.Doc(hits[0].doc);
+			Document d = searcher.Doc(hits[0].Doc);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
-			TestHits(hits, 44, searcher.GetIndexReader());
+			TestHits(hits, 44, searcher.IndexReader);
 			searcher.Close();
 			
 			// make sure we can do delete & setNorm against this
@@ -426,9 +426,9 @@ namespace Lucene.Net.Index
 			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(43, hits.Length, "wrong number of hits");
-			d = searcher.Doc(hits[0].doc);
+			d = searcher.Doc(hits[0].Doc);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
-			TestHits(hits, 43, searcher.GetIndexReader());
+			TestHits(hits, 43, searcher.IndexReader);
 			searcher.Close();
 			
 			// optimize
@@ -439,8 +439,8 @@ namespace Lucene.Net.Index
 			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(43, hits.Length, "wrong number of hits");
-			d = searcher.Doc(hits[0].doc);
-			TestHits(hits, 43, searcher.GetIndexReader());
+			d = searcher.Doc(hits[0].Doc);
+			TestHits(hits, 43, searcher.IndexReader);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
 			searcher.Close();
 			
@@ -459,7 +459,7 @@ namespace Lucene.Net.Index
 			IndexSearcher searcher = new IndexSearcher(dir, true);
 			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(34, hits.Length, "wrong number of hits");
-			Document d = searcher.Doc(hits[0].doc);
+			Document d = searcher.Doc(hits[0].Doc);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
 			searcher.Close();
 			
@@ -476,9 +476,9 @@ namespace Lucene.Net.Index
 			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(33, hits.Length, "wrong number of hits");
-			d = searcher.Doc(hits[0].doc);
+			d = searcher.Doc(hits[0].Doc);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
-			TestHits(hits, 33, searcher.GetIndexReader());
+			TestHits(hits, 33, searcher.IndexReader);
 			searcher.Close();
 			
 			// optimize
@@ -489,9 +489,9 @@ namespace Lucene.Net.Index
 			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(33, hits.Length, "wrong number of hits");
-			d = searcher.Doc(hits[0].doc);
+			d = searcher.Doc(hits[0].Doc);
 			Assert.AreEqual("22", d.Get("id"), "wrong first document");
-			TestHits(hits, 33, searcher.GetIndexReader());
+			TestHits(hits, 33, searcher.IndexReader);
 			searcher.Close();
 			
 			dir.Close();
@@ -506,7 +506,7 @@ namespace Lucene.Net.Index
 			
 			Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			writer.SetUseCompoundFile(doCFS);
+			writer.UseCompoundFile = doCFS;
 			writer.SetMaxBufferedDocs(10);
 			
 			for (int i = 0; i < 35; i++)
@@ -518,7 +518,7 @@ namespace Lucene.Net.Index
 			
 			// open fresh writer so we get no prx file in the added segment
 			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-			writer.SetUseCompoundFile(doCFS);
+			writer.UseCompoundFile = doCFS;
 			writer.SetMaxBufferedDocs(10);
 			AddNoProxDoc(writer);
 			writer.Close();
@@ -653,10 +653,10 @@ namespace Lucene.Net.Index
 		{
 			Document doc = new Document();
 			Field f = new Field("content3", "aaa", Field.Store.YES, Field.Index.ANALYZED);
-			f.SetOmitTermFreqAndPositions(true);
+			f.OmitTermFreqAndPositions = true;
 			doc.Add(f);
 			f = new Field("content4", "aaa", Field.Store.YES, Field.Index.NO);
-			f.SetOmitTermFreqAndPositions(true);
+			f.OmitTermFreqAndPositions = true;
 			doc.Add(f);
 			writer.AddDocument(doc);
 		}

Modified: incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs Mon Mar 12 22:29:26 2012
@@ -104,12 +104,12 @@ namespace Lucene.Net.Index
 			Assert.IsNotNull(expected, msg + " null expected");
 			Assert.IsNotNull(test, msg + " null test");
 			Assert.AreEqual(expected.Length(), test.Length(), msg + " length");
-			Assert.AreEqual(expected.GetFilePointer(), test.GetFilePointer(), msg + " position");
+			Assert.AreEqual(expected.FilePointer, test.FilePointer, msg + " position");
 			
 			byte[] expectedBuffer = new byte[512];
 			byte[] testBuffer = new byte[expectedBuffer.Length];
 			
-			long remainder = expected.Length() - expected.GetFilePointer();
+			long remainder = expected.Length() - expected.FilePointer;
 			while (remainder > 0)
 			{
 				int readLen = (int) System.Math.Min(remainder, expectedBuffer.Length);
@@ -445,8 +445,8 @@ namespace Lucene.Net.Index
 			// Seek the first pair
 			e1.Seek(100);
 			a1.Seek(100);
-			Assert.AreEqual(100, e1.GetFilePointer());
-			Assert.AreEqual(100, a1.GetFilePointer());
+			Assert.AreEqual(100, e1.FilePointer);
+			Assert.AreEqual(100, a1.FilePointer);
 			byte be1 = e1.ReadByte();
 			byte ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
@@ -454,15 +454,15 @@ namespace Lucene.Net.Index
 			// Now seek the second pair
 			e2.Seek(1027);
 			a2.Seek(1027);
-			Assert.AreEqual(1027, e2.GetFilePointer());
-			Assert.AreEqual(1027, a2.GetFilePointer());
+			Assert.AreEqual(1027, e2.FilePointer);
+			Assert.AreEqual(1027, a2.FilePointer);
 			byte be2 = e2.ReadByte();
 			byte ba2 = a2.ReadByte();
 			Assert.AreEqual(be2, ba2);
 			
 			// Now make sure the first one didn't move
-			Assert.AreEqual(101, e1.GetFilePointer());
-			Assert.AreEqual(101, a1.GetFilePointer());
+			Assert.AreEqual(101, e1.FilePointer);
+			Assert.AreEqual(101, a1.FilePointer);
 			be1 = e1.ReadByte();
 			ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
@@ -470,15 +470,15 @@ namespace Lucene.Net.Index
 			// Now move the first one again, past the buffer length
 			e1.Seek(1910);
 			a1.Seek(1910);
-			Assert.AreEqual(1910, e1.GetFilePointer());
-			Assert.AreEqual(1910, a1.GetFilePointer());
+			Assert.AreEqual(1910, e1.FilePointer);
+			Assert.AreEqual(1910, a1.FilePointer);
 			be1 = e1.ReadByte();
 			ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
 			
 			// Now make sure the second set didn't move
-			Assert.AreEqual(1028, e2.GetFilePointer());
-			Assert.AreEqual(1028, a2.GetFilePointer());
+			Assert.AreEqual(1028, e2.FilePointer);
+			Assert.AreEqual(1028, a2.FilePointer);
 			be2 = e2.ReadByte();
 			ba2 = a2.ReadByte();
 			Assert.AreEqual(be2, ba2);
@@ -486,16 +486,16 @@ namespace Lucene.Net.Index
 			// Move the second set back, again cross the buffer size
 			e2.Seek(17);
 			a2.Seek(17);
-			Assert.AreEqual(17, e2.GetFilePointer());
-			Assert.AreEqual(17, a2.GetFilePointer());
+			Assert.AreEqual(17, e2.FilePointer);
+			Assert.AreEqual(17, a2.FilePointer);
 			be2 = e2.ReadByte();
 			ba2 = a2.ReadByte();
 			Assert.AreEqual(be2, ba2);
 			
 			// Finally, make sure the first set didn't move
 			// Now make sure the first one didn't move
-			Assert.AreEqual(1911, e1.GetFilePointer());
-			Assert.AreEqual(1911, a1.GetFilePointer());
+			Assert.AreEqual(1911, e1.FilePointer);
+			Assert.AreEqual(1911, a1.FilePointer);
 			be1 = e1.ReadByte();
 			ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
@@ -526,8 +526,8 @@ namespace Lucene.Net.Index
 			// Seek the first pair
 			e1.Seek(100);
 			a1.Seek(100);
-			Assert.AreEqual(100, e1.GetFilePointer());
-			Assert.AreEqual(100, a1.GetFilePointer());
+			Assert.AreEqual(100, e1.FilePointer);
+			Assert.AreEqual(100, a1.FilePointer);
 			byte be1 = e1.ReadByte();
 			byte ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
@@ -535,15 +535,15 @@ namespace Lucene.Net.Index
 			// Now seek the second pair
 			e2.Seek(1027);
 			a2.Seek(1027);
-			Assert.AreEqual(1027, e2.GetFilePointer());
-			Assert.AreEqual(1027, a2.GetFilePointer());
+			Assert.AreEqual(1027, e2.FilePointer);
+			Assert.AreEqual(1027, a2.FilePointer);
 			byte be2 = e2.ReadByte();
 			byte ba2 = a2.ReadByte();
 			Assert.AreEqual(be2, ba2);
 			
 			// Now make sure the first one didn't move
-			Assert.AreEqual(101, e1.GetFilePointer());
-			Assert.AreEqual(101, a1.GetFilePointer());
+			Assert.AreEqual(101, e1.FilePointer);
+			Assert.AreEqual(101, a1.FilePointer);
 			be1 = e1.ReadByte();
 			ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
@@ -551,15 +551,15 @@ namespace Lucene.Net.Index
 			// Now move the first one again, past the buffer length
 			e1.Seek(1910);
 			a1.Seek(1910);
-			Assert.AreEqual(1910, e1.GetFilePointer());
-			Assert.AreEqual(1910, a1.GetFilePointer());
+			Assert.AreEqual(1910, e1.FilePointer);
+			Assert.AreEqual(1910, a1.FilePointer);
 			be1 = e1.ReadByte();
 			ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
 			
 			// Now make sure the second set didn't move
-			Assert.AreEqual(1028, e2.GetFilePointer());
-			Assert.AreEqual(1028, a2.GetFilePointer());
+			Assert.AreEqual(1028, e2.FilePointer);
+			Assert.AreEqual(1028, a2.FilePointer);
 			be2 = e2.ReadByte();
 			ba2 = a2.ReadByte();
 			Assert.AreEqual(be2, ba2);
@@ -567,16 +567,16 @@ namespace Lucene.Net.Index
 			// Move the second set back, again cross the buffer size
 			e2.Seek(17);
 			a2.Seek(17);
-			Assert.AreEqual(17, e2.GetFilePointer());
-			Assert.AreEqual(17, a2.GetFilePointer());
+			Assert.AreEqual(17, e2.FilePointer);
+			Assert.AreEqual(17, a2.FilePointer);
 			be2 = e2.ReadByte();
 			ba2 = a2.ReadByte();
 			Assert.AreEqual(be2, ba2);
 			
 			// Finally, make sure the first set didn't move
 			// Now make sure the first one didn't move
-			Assert.AreEqual(1911, e1.GetFilePointer());
-			Assert.AreEqual(1911, a1.GetFilePointer());
+			Assert.AreEqual(1911, e1.FilePointer);
+			Assert.AreEqual(1911, a1.FilePointer);
 			be1 = e1.ReadByte();
 			ba1 = a1.ReadByte();
 			Assert.AreEqual(be1, ba1);
@@ -662,12 +662,12 @@ namespace Lucene.Net.Index
 				largeBuf[i] = (byte) ((new System.Random().NextDouble()) * 256);
 			}
 			
-			long currentPos = os.GetFilePointer();
+			long currentPos = os.FilePointer;
 			os.WriteBytes(largeBuf, largeBuf.Length);
 			
 			try
 			{
-				Assert.AreEqual(currentPos + largeBuf.Length, os.GetFilePointer());
+				Assert.AreEqual(currentPos + largeBuf.Length, os.FilePointer);
 			}
 			finally
 			{

Modified: incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs Mon Mar 12 22:29:26 2012
@@ -121,7 +121,7 @@ namespace Lucene.Net.Index
 			
 			writer.Close();
 			IndexReader reader = IndexReader.Open(directory, true);
-			Assert.AreEqual(200+extraCount, reader.NumDocs());
+			Assert.AreEqual(200+extraCount, reader.NumDocs);
 			reader.Close();
 			directory.Close();
 		}
@@ -144,7 +144,7 @@ namespace Lucene.Net.Index
 			// Force degenerate merging so we can get a mix of
 			// merging of segments with and without deletes at the
 			// start:
-			mp.SetMinMergeDocs(1000);
+			mp.MinMergeDocs = 1000;
 			
 			Document doc = new Document();
 			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
@@ -170,7 +170,7 @@ namespace Lucene.Net.Index
 			writer.Close();
 			IndexReader reader = IndexReader.Open(directory, true);
 			// Verify that we did not lose any deletes...
-			Assert.AreEqual(450, reader.NumDocs());
+			Assert.AreEqual(450, reader.NumDocs);
 			reader.Close();
 			directory.Close();
 		}
@@ -243,7 +243,7 @@ namespace Lucene.Net.Index
                 writer.Close(false);
 
                 IndexReader reader = IndexReader.Open(directory, true);
-                Assert.AreEqual((1 + iter)*182, reader.NumDocs());
+                Assert.AreEqual((1 + iter)*182, reader.NumDocs);
                 reader.Close();
 
                 // Reopen

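The same conversion covers setters and no-argument getters alike: mp.SetMinMergeDocs(1000) becomes the property assignment mp.MinMergeDocs = 1000, and reader.NumDocs() becomes the read-only property reader.NumDocs. A compact sketch of both shapes (SampleMergePolicy and SampleReader are hypothetical stand-ins):

// Read-write property replacing a SetXxx(...) method.
public class SampleMergePolicy
{
    public int MinMergeDocs { get; set; } // was SetMinMergeDocs(int)
}

// Read-only property replacing a no-argument NumDocs() method.
public class SampleReader
{
    private readonly int numDocs;

    public SampleReader(int numDocs)
    {
        this.numDocs = numDocs;
    }

    public int NumDocs
    {
        get { return numDocs; }
    }
}
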
Modified: incubator/lucene.net/trunk/test/core/Index/TestCrash.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestCrash.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestCrash.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestCrash.cs Mon Mar 12 22:29:26 2012
@@ -40,12 +40,12 @@ namespace Lucene.Net.Index
 		
 		private IndexWriter InitIndex(MockRAMDirectory dir)
 		{
-			dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
+			dir.SetLockFactory(NoLockFactory.Instance);
 
             IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
 			//writer.setMaxBufferedDocs(2);
 			writer.SetMaxBufferedDocs(10);
-			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
+			((ConcurrentMergeScheduler) writer.MergeScheduler).SetSuppressExceptions();
 			
 			Document doc = new Document();
 			doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
@@ -59,7 +59,7 @@ namespace Lucene.Net.Index
 		private void  Crash(IndexWriter writer)
 		{
 			MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
-			ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.GetMergeScheduler();
+			ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.MergeScheduler;
 			dir.Crash();
 			cms.Sync();
 			dir.ClearCrash();
@@ -72,7 +72,7 @@ namespace Lucene.Net.Index
 			MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
 			Crash(writer);
 			IndexReader reader = IndexReader.Open(dir, true);
-			Assert.IsTrue(reader.NumDocs() < 157);
+			Assert.IsTrue(reader.NumDocs < 157);
 		}
 		
 		[Test]
@@ -86,7 +86,7 @@ namespace Lucene.Net.Index
 			writer.Close();
 			
 			IndexReader reader = IndexReader.Open(dir, false);
-			Assert.IsTrue(reader.NumDocs() < 314);
+			Assert.IsTrue(reader.NumDocs < 314);
 		}
 		
 		[Test]
@@ -109,7 +109,7 @@ namespace Lucene.Net.Index
 			*/
 			
 			IndexReader reader = IndexReader.Open(dir, false);
-			Assert.IsTrue(reader.NumDocs() >= 157);
+			Assert.IsTrue(reader.NumDocs >= 157);
 		}
 		
 		[Test]
@@ -130,7 +130,7 @@ namespace Lucene.Net.Index
 			*/
 			
 			IndexReader reader = IndexReader.Open(dir, false);
-			Assert.AreEqual(157, reader.NumDocs());
+			Assert.AreEqual(157, reader.NumDocs);
 		}
 		
 		[Test]
@@ -151,7 +151,7 @@ namespace Lucene.Net.Index
 			System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
 			*/
 			IndexReader reader = IndexReader.Open(dir, false);
-			Assert.AreEqual(157, reader.NumDocs());
+			Assert.AreEqual(157, reader.NumDocs);
 		}
 		
 		[Test]
@@ -174,7 +174,7 @@ namespace Lucene.Net.Index
 			System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
 			*/
 			reader = IndexReader.Open(dir, false);
-			Assert.AreEqual(157, reader.NumDocs());
+			Assert.AreEqual(157, reader.NumDocs);
 		}
 		
 		[Test]
@@ -198,7 +198,7 @@ namespace Lucene.Net.Index
 			System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
 			*/
 			reader = IndexReader.Open(dir, false);
-			Assert.AreEqual(156, reader.NumDocs());
+			Assert.AreEqual(156, reader.NumDocs);
 		}
 	}
 }
\ No newline at end of file

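TestCrash.cs also picks up a singleton rename: the static factory method NoLockFactory.GetNoLockFactory() becomes a static Instance property, the customary C# singleton shape. A sketch under the assumption that the factory is stateless (SampleLockFactory is hypothetical):

// Eagerly initialized singleton exposed through a static property.
public sealed class SampleLockFactory
{
    private static readonly SampleLockFactory instance = new SampleLockFactory();

    private SampleLockFactory() { } // no external construction

    // was: public static SampleLockFactory GetNoLockFactory()
    public static SampleLockFactory Instance
    {
        get { return instance; }
    }
}
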
Modified: incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs Mon Mar 12 22:29:26 2012
@@ -44,20 +44,20 @@ namespace Lucene.Net.Index
 		private void  VerifyCommitOrder<T>(IList<T> commits) where T : IndexCommit
 		{
 			IndexCommit firstCommit = commits[0];
-			long last = SegmentInfos.GenerationFromSegmentsFileName(firstCommit.GetSegmentsFileName());
-			Assert.AreEqual(last, firstCommit.GetGeneration());
-			long lastVersion = firstCommit.GetVersion();
-			long lastTimestamp = firstCommit.GetTimestamp();
+			long last = SegmentInfos.GenerationFromSegmentsFileName(firstCommit.SegmentsFileName);
+			Assert.AreEqual(last, firstCommit.Generation);
+			long lastVersion = firstCommit.Version;
+			long lastTimestamp = firstCommit.Timestamp;
 			for (int i = 1; i < commits.Count; i++)
 			{
 				IndexCommit commit = commits[i];
-				long now = SegmentInfos.GenerationFromSegmentsFileName(commit.GetSegmentsFileName());
-				long nowVersion = commit.GetVersion();
-				long nowTimestamp = commit.GetTimestamp();
+				long now = SegmentInfos.GenerationFromSegmentsFileName(commit.SegmentsFileName);
+				long nowVersion = commit.Version;
+				long nowTimestamp = commit.Timestamp;
 				Assert.IsTrue(now > last, "SegmentInfos commits are out-of-order");
 				Assert.IsTrue(nowVersion > lastVersion, "SegmentInfos versions are out-of-order");
 				Assert.IsTrue(nowTimestamp >= lastTimestamp, "SegmentInfos timestamps are out-of-order: now=" + nowTimestamp + " vs last=" + lastTimestamp);
-				Assert.AreEqual(now, commit.GetGeneration());
+				Assert.AreEqual(now, commit.Generation);
 				last = now;
 				lastVersion = nowVersion;
 				lastTimestamp = nowTimestamp;
@@ -95,7 +95,7 @@ namespace Lucene.Net.Index
 			{
 				IndexCommit lastCommit = (IndexCommit) commits[commits.Count - 1];
 				IndexReader r = IndexReader.Open(dir, true);
-				Assert.AreEqual(r.IsOptimized(), lastCommit.IsOptimized(), "lastCommit.isOptimized()=" + lastCommit.IsOptimized() + " vs IndexReader.isOptimized=" + r.IsOptimized());
+				Assert.AreEqual(r.IsOptimized, lastCommit.IsOptimized(), "lastCommit.isOptimized()=" + lastCommit.IsOptimized() + " vs IndexReader.isOptimized=" + r.IsOptimized);
 				r.Close();
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnCommit++;
@@ -136,7 +136,7 @@ namespace Lucene.Net.Index
 				{
 					IndexCommit commit = (IndexCommit) it.Current;
 					commit.Delete();
-					Assert.IsTrue(commit.IsDeleted());
+					Assert.IsTrue(commit.IsDeleted);
 				}
 			}
 			public virtual void  OnCommit<T>(IList<T> commits) where T : IndexCommit
@@ -200,7 +200,7 @@ namespace Lucene.Net.Index
 				// commit:
 				if (isCommit)
 				{
-					System.String fileName = commits[commits.Count - 1].GetSegmentsFileName();
+					System.String fileName = commits[commits.Count - 1].SegmentsFileName;
 					if (seen.Contains(fileName))
 					{
 						throw new System.SystemException("onCommit was called twice on the same commit point: " + fileName);
@@ -261,14 +261,14 @@ namespace Lucene.Net.Index
 				IndexCommit lastCommit = commits[commits.Count - 1];
 				
 				// Any commit older than expireTime should be deleted:
-				double expireTime = dir.FileModified(lastCommit.GetSegmentsFileName()) / 1000.0 - expirationTimeSeconds;
+				double expireTime = dir.FileModified(lastCommit.SegmentsFileName) / 1000.0 - expirationTimeSeconds;
 				
 				System.Collections.IEnumerator it = commits.GetEnumerator();
 				
 				while (it.MoveNext())
 				{
 					IndexCommit commit = (IndexCommit) it.Current;
-					double modTime = dir.FileModified(commit.GetSegmentsFileName()) / 1000.0;
+					double modTime = dir.FileModified(commit.SegmentsFileName) / 1000.0;
 					if (commit != lastCommit && modTime < expireTime)
 					{
 						commit.Delete();
@@ -291,7 +291,7 @@ namespace Lucene.Net.Index
 			Directory dir = new RAMDirectory();
 			ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(this, dir, SECONDS);
             IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
-			writer.SetUseCompoundFile(useCompoundFile);
+			writer.UseCompoundFile = useCompoundFile;
 			writer.Close();
 			
 			long lastDeleteTime = 0;
@@ -301,7 +301,7 @@ namespace Lucene.Net.Index
 				// past commits
 				lastDeleteTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
                 writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				for (int j = 0; j < 17; j++)
 				{
 					AddDoc(writer);
@@ -366,7 +366,7 @@ namespace Lucene.Net.Index
 
                 IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetMaxBufferedDocs(10);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				writer.SetMergeScheduler(new SerialMergeScheduler());
 				for (int i = 0; i < 107; i++)
 				{
@@ -375,7 +375,7 @@ namespace Lucene.Net.Index
 				writer.Close();
 
                 writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				writer.Optimize();
 				writer.Close();
 				
@@ -458,7 +458,7 @@ namespace Lucene.Net.Index
 			while (it.MoveNext())
 			{
 				IndexCommit commit = (IndexCommit) it.Current;
-				if (lastCommit == null || commit.GetGeneration() > lastCommit.GetGeneration())
+				if (lastCommit == null || commit.Generation > lastCommit.Generation)
 					lastCommit = commit;
 			}
 			Assert.IsTrue(lastCommit != null);
@@ -481,8 +481,8 @@ namespace Lucene.Net.Index
 			
 			IndexReader r = IndexReader.Open(dir, true);
 			// Still optimized, still 11 docs
-			Assert.IsTrue(r.IsOptimized());
-			Assert.AreEqual(11, r.NumDocs());
+			Assert.IsTrue(r.IsOptimized);
+			Assert.AreEqual(11, r.NumDocs);
 			r.Close();
 			
 			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
@@ -496,8 +496,8 @@ namespace Lucene.Net.Index
 			r = IndexReader.Open(dir, true);
 			// Not optimized because we rolled it back, and now only
 			// 10 docs
-			Assert.IsTrue(!r.IsOptimized());
-			Assert.AreEqual(10, r.NumDocs());
+			Assert.IsTrue(!r.IsOptimized);
+			Assert.AreEqual(10, r.NumDocs);
 			r.Close();
 			
 			// Reoptimize
@@ -506,8 +506,8 @@ namespace Lucene.Net.Index
 			writer.Close();
 			
 			r = IndexReader.Open(dir, true);
-			Assert.IsTrue(r.IsOptimized());
-			Assert.AreEqual(10, r.NumDocs());
+			Assert.IsTrue(r.IsOptimized);
+			Assert.AreEqual(10, r.NumDocs);
 			r.Close();
 			
 			// Now open writer on the commit just before optimize,
@@ -518,16 +518,16 @@ namespace Lucene.Net.Index
 			// Reader still sees optimized index, because writer
 			// opened on the prior commit has not yet committed:
 			r = IndexReader.Open(dir, true);
-			Assert.IsTrue(r.IsOptimized());
-			Assert.AreEqual(10, r.NumDocs());
+			Assert.IsTrue(r.IsOptimized);
+			Assert.AreEqual(10, r.NumDocs);
 			r.Close();
 			
 			writer.Close();
 			
 			// Now reader sees unoptimized index:
 			r = IndexReader.Open(dir, true);
-			Assert.IsTrue(!r.IsOptimized());
-			Assert.AreEqual(10, r.NumDocs());
+			Assert.IsTrue(!r.IsOptimized);
+			Assert.AreEqual(10, r.NumDocs);
 			r.Close();
 			
 			dir.Close();
@@ -551,7 +551,7 @@ namespace Lucene.Net.Index
 
                 IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetMaxBufferedDocs(10);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				for (int i = 0; i < 107; i++)
 				{
 					AddDoc(writer);
@@ -559,7 +559,7 @@ namespace Lucene.Net.Index
 				writer.Close();
 
                 writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				writer.Optimize();
 				writer.Close();
 				
@@ -597,7 +597,7 @@ namespace Lucene.Net.Index
 				{
                     IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 					writer.SetMaxBufferedDocs(10);
-					writer.SetUseCompoundFile(useCompoundFile);
+					writer.UseCompoundFile = useCompoundFile;
 					for (int i = 0; i < 17; i++)
 					{
 						AddDoc(writer);
@@ -660,7 +660,7 @@ namespace Lucene.Net.Index
 				
 				Directory dir = new RAMDirectory();
                 IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				writer.Close();
 				Term searchTerm = new Term("content", "aaa");
 				Query query = new TermQuery(searchTerm);
@@ -668,7 +668,7 @@ namespace Lucene.Net.Index
 				for (int i = 0; i < N + 1; i++)
 				{
                     writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
-					writer.SetUseCompoundFile(useCompoundFile);
+					writer.UseCompoundFile = useCompoundFile;
 					for (int j = 0; j < 17; j++)
 					{
 						AddDoc(writer);
@@ -686,7 +686,7 @@ namespace Lucene.Net.Index
 					searcher.Close();
 				}
                 writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				writer.Optimize();
 				// this is a commit
 				writer.Close();
@@ -770,7 +770,7 @@ namespace Lucene.Net.Index
 				Directory dir = new RAMDirectory();
                 IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetMaxBufferedDocs(10);
-				writer.SetUseCompoundFile(useCompoundFile);
+				writer.UseCompoundFile = useCompoundFile;
 				writer.Close();
 				Term searchTerm = new Term("content", "aaa");
 				Query query = new TermQuery(searchTerm);
@@ -780,7 +780,7 @@ namespace Lucene.Net.Index
 
                     writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 					writer.SetMaxBufferedDocs(10);
-					writer.SetUseCompoundFile(useCompoundFile);
+					writer.UseCompoundFile = useCompoundFile;
 					for (int j = 0; j < 17; j++)
 					{
 						AddDoc(writer);

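In TestDeletionPolicy.cs the commit metadata accessors (GetGeneration(), GetVersion(), GetTimestamp(), GetSegmentsFileName()) all become read-only properties, and SetUseCompoundFile(bool) becomes the writable UseCompoundFile property. A sketch of the resulting call-site shape, mirroring the VerifyCommitOrder loop above (SampleCommit and the Verify helper are hypothetical):

using System.Collections.Generic;
using System.Diagnostics;

public class SampleCommit
{
    public long Generation { get; private set; }
    public long Version { get; private set; }
    public long Timestamp { get; private set; }
    public string SegmentsFileName { get; private set; }

    public SampleCommit(long generation, long version, long timestamp, string segmentsFileName)
    {
        Generation = generation;
        Version = version;
        Timestamp = timestamp;
        SegmentsFileName = segmentsFileName;
    }
}

public static class CommitOrderCheck
{
    // Same invariant as VerifyCommitOrder above: generations and versions
    // strictly increase, timestamps never decrease.
    public static void Verify(IList<SampleCommit> commits)
    {
        for (int i = 1; i < commits.Count; i++)
        {
            Debug.Assert(commits[i].Generation > commits[i - 1].Generation);
            Debug.Assert(commits[i].Version > commits[i - 1].Version);
            Debug.Assert(commits[i].Timestamp >= commits[i - 1].Timestamp);
        }
    }
}
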
Modified: incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs Mon Mar 12 22:29:26 2012
@@ -104,11 +104,11 @@ namespace Lucene.Net.Index
 			sis.Read(dir);
 			IndexReader reader = OpenReader();
 			Assert.IsTrue(reader != null);
-			Assert.AreEqual(2, reader.NumDocs());
+			Assert.AreEqual(2, reader.NumDocs);
 			reader.DeleteDocument(0);
-			Assert.AreEqual(1, reader.NumDocs());
+			Assert.AreEqual(1, reader.NumDocs);
 			reader.UndeleteAll();
-			Assert.AreEqual(2, reader.NumDocs());
+			Assert.AreEqual(2, reader.NumDocs);
 			
 			// Ensure undeleteAll survives commit/close/reopen:
 			reader.Commit();
@@ -121,10 +121,10 @@ namespace Lucene.Net.Index
 			
 			sis.Read(dir);
 			reader = OpenReader();
-			Assert.AreEqual(2, reader.NumDocs());
+			Assert.AreEqual(2, reader.NumDocs);
 			
 			reader.DeleteDocument(0);
-			Assert.AreEqual(1, reader.NumDocs());
+			Assert.AreEqual(1, reader.NumDocs);
 			reader.Commit();
 			reader.Close();
 			if (reader is MultiReader)
@@ -133,7 +133,7 @@ namespace Lucene.Net.Index
 				sis.Commit(dir);
 			sis.Read(dir);
 			reader = OpenReader();
-			Assert.AreEqual(1, reader.NumDocs());
+			Assert.AreEqual(1, reader.NumDocs);
 		}
 		
 		
@@ -153,14 +153,14 @@ namespace Lucene.Net.Index
 			AddDoc(ramDir2, "test blah", true);
 			IndexReader[] readers = new IndexReader[]{IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir2, false)};
 			MultiReader mr = new MultiReader(readers);
-			Assert.IsTrue(mr.IsCurrent()); // just opened, must be current
+			Assert.IsTrue(mr.IsCurrent); // just opened, must be current
 			AddDoc(ramDir1, "more text", false);
-			Assert.IsFalse(mr.IsCurrent()); // has been modified, not current anymore
+			Assert.IsFalse(mr.IsCurrent); // has been modified, not current anymore
 			AddDoc(ramDir2, "even more text", false);
-			Assert.IsFalse(mr.IsCurrent()); // has been modified even more, not current anymore
+			Assert.IsFalse(mr.IsCurrent); // has been modified even more, not current anymore
 			try
 			{
-				mr.GetVersion();
+				var ver = mr.Version;
 				Assert.Fail();
 			}
 			catch (System.NotSupportedException e)

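One subtlety in the TestDirectoryReader.cs hunk: the old statement mr.GetVersion(); was legal on its own, but a bare property read (mr.Version;) is not a valid C# statement, so the ported test assigns the value to a throwaway local (var ver = mr.Version;) purely to trigger the expected NotSupportedException. A sketch of that pattern (SampleMultiReader is hypothetical):

// A property whose getter is expected to throw on this reader type.
public class SampleMultiReader
{
    public long Version
    {
        get { throw new System.NotSupportedException("Version is unsupported on a multi reader"); }
    }
}

public static class VersionCheck
{
    public static bool ThrowsAsExpected(SampleMultiReader mr)
    {
        try
        {
            var ver = mr.Version; // read solely for its side effect
            return false;         // unreachable if the getter throws
        }
        catch (System.NotSupportedException)
        {
            return true;
        }
    }
}
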
Modified: incubator/lucene.net/trunk/test/core/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDoc.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDoc.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDoc.cs Mon Mar 12 22:29:26 2012
@@ -226,7 +226,7 @@ namespace Lucene.Net.Index
 		{
 			SegmentReader reader = SegmentReader.Get(true, si, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			
-			for (int i = 0; i < reader.NumDocs(); i++)
+			for (int i = 0; i < reader.NumDocs; i++)
 			{
 				out_Renamed.WriteLine(reader.Document(i));
 			}

Modified: incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs Mon Mar 12 22:29:26 2012
@@ -16,6 +16,7 @@
  */
 
 using System;
+using Lucene.Net.Documents;
 using Lucene.Net.Util;
 using NUnit.Framework;
 
@@ -31,7 +32,6 @@ using PositionIncrementAttribute = Lucen
 using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using Fieldable = Lucene.Net.Documents.Fieldable;
 using Index = Lucene.Net.Documents.Field.Index;
 using Store = Lucene.Net.Documents.Field.Store;
 using TermVector = Lucene.Net.Documents.Field.TermVector;
@@ -111,8 +111,8 @@ namespace Lucene.Net.Index
 					if (state != null)
 					{
 						RestoreState(state);
-						payloadAtt.SetPayload(null);
-						posIncrAtt.SetPositionIncrement(0);
+						payloadAtt.Payload = null;
+						posIncrAtt.PositionIncrement = 0;
 						termAtt.SetTermBuffer(new char[]{'b'}, 0, 1);
 						state = null;
 						return true;
@@ -123,12 +123,12 @@ namespace Lucene.Net.Index
 						return false;
 					if (System.Char.IsDigit(termAtt.TermBuffer()[0]))
 					{
-						posIncrAtt.SetPositionIncrement(termAtt.TermBuffer()[0] - '0');
+						posIncrAtt.PositionIncrement = termAtt.TermBuffer()[0] - '0';
 					}
 					if (first)
 					{
 						// set payload on first position only
-						payloadAtt.SetPayload(new Payload(new byte[]{100}));
+						payloadAtt.Payload = new Payload(new byte[]{100});
 						first = false;
 					}
 					
@@ -244,27 +244,27 @@ namespace Lucene.Net.Index
 			Assert.IsTrue(doc != null);
 			
 			//System.out.println("Document: " + doc);
-			Fieldable[] fields = doc.GetFields("textField2");
+			IFieldable[] fields = doc.GetFields("textField2");
 			Assert.IsTrue(fields != null && fields.Length == 1);
-			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_2_TEXT));
-			Assert.IsTrue(fields[0].IsTermVectorStored());
+			Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.FIELD_2_TEXT));
+			Assert.IsTrue(fields[0].IsTermVectorStored);
 			
 			fields = doc.GetFields("textField1");
 			Assert.IsTrue(fields != null && fields.Length == 1);
-			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_1_TEXT));
-			Assert.IsFalse(fields[0].IsTermVectorStored());
+			Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.FIELD_1_TEXT));
+			Assert.IsFalse(fields[0].IsTermVectorStored);
 			
 			fields = doc.GetFields("keyField");
 			Assert.IsTrue(fields != null && fields.Length == 1);
-			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.KEYWORD_TEXT));
+			Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.KEYWORD_TEXT));
 			
 			fields = doc.GetFields(DocHelper.NO_NORMS_KEY);
 			Assert.IsTrue(fields != null && fields.Length == 1);
-			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.NO_NORMS_TEXT));
+			Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.NO_NORMS_TEXT));
 			
 			fields = doc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
 			Assert.IsTrue(fields != null && fields.Length == 1);
-			Assert.IsTrue(fields[0].StringValue().Equals(DocHelper.FIELD_3_TEXT));
+			Assert.IsTrue(fields[0].StringValue.Equals(DocHelper.FIELD_3_TEXT));
 			
 			// test that the norms are not present in the segment if
 			// omitNorms is true
@@ -324,11 +324,11 @@ namespace Lucene.Net.Index
 			int freq = termPositions.Freq();
 			Assert.AreEqual(3, freq);
 			Assert.AreEqual(0, termPositions.NextPosition());
-			Assert.AreEqual(true, termPositions.IsPayloadAvailable());
+			Assert.AreEqual(true, termPositions.IsPayloadAvailable);
 			Assert.AreEqual(6, termPositions.NextPosition());
-			Assert.AreEqual(false, termPositions.IsPayloadAvailable());
+			Assert.AreEqual(false, termPositions.IsPayloadAvailable);
 			Assert.AreEqual(7, termPositions.NextPosition());
-			Assert.AreEqual(false, termPositions.IsPayloadAvailable());
+			Assert.AreEqual(false, termPositions.IsPayloadAvailable);
 		}
 		
 		
@@ -408,7 +408,7 @@ namespace Lucene.Net.Index
 			doc.Add(new Field("f1", "v2", Field.Store.YES, Field.Index.NO));
 			// f2 has no TF
 			Field f = new Field("f2", "v1", Field.Store.NO, Field.Index.ANALYZED);
-			f.SetOmitTermFreqAndPositions(true);
+			f.OmitTermFreqAndPositions = true;
 			doc.Add(f);
 			doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NO));
 			

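The TestDocumentWriter.cs hunk shows two .NET conventions arriving at once: the Fieldable interface gains the customary I prefix (IFieldable), and Get/Set accessor pairs collapse into read-write properties (payloadAtt.Payload = null, posIncrAtt.PositionIncrement = 0, f.OmitTermFreqAndPositions = true). A sketch of the attribute shapes (the Sample* types are hypothetical stand-ins):

// Read-write properties replacing SetPayload(...) / SetPositionIncrement(...).
public class SamplePayload
{
    public byte[] Data { get; private set; }

    public SamplePayload(byte[] data)
    {
        Data = data;
    }
}

public class SamplePayloadAttribute
{
    public SamplePayload Payload { get; set; } // was GetPayload()/SetPayload()
}

public class SamplePositionIncrementAttribute
{
    public int PositionIncrement { get; set; } // was SetPositionIncrement(int)
}
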
Modified: incubator/lucene.net/trunk/test/core/Index/TestFieldInfos.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestFieldInfos.cs?rev=1299911&r1=1299910&r2=1299911&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestFieldInfos.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestFieldInfos.cs Mon Mar 12 22:29:26 2012
@@ -69,7 +69,7 @@ namespace Lucene.Net.Index
 			{
 				fieldInfos.Write(output);
 				output.Close();
-				Assert.IsTrue(output.Length() > 0);
+				Assert.IsTrue(output.Length > 0);
 				FieldInfos readIn = new FieldInfos(dir, name);
 				Assert.IsTrue(fieldInfos.Size() == readIn.Size());
 				FieldInfo info = readIn.FieldInfo("textField1");


