lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From d...@apache.org
Subject svn commit: r911154 [2/2] - in /lucene/lucene.net/trunk/C#/src: Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net/Search/Payloads/ Lucene.Net/Store/ Lucene.Net/Util/ Test/ Test/Analysis/ Test/I...
Date Wed, 17 Feb 2010 19:33:05 GMT
Modified: lucene/lucene.net/trunk/C#/src/Test/Analysis/BaseTokenStreamTestCase.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Analysis/BaseTokenStreamTestCase.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Analysis/BaseTokenStreamTestCase.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Analysis/BaseTokenStreamTestCase.cs Wed Feb 17 19:33:03 2010
@@ -97,151 +97,219 @@
 		}
 		
 		// some helpers to test Analyzers and TokenStreams:
-		
-		public static void  AssertTokenStreamContents(TokenStream ts, System.String[] output, int[] startOffsets, int[] endOffsets, System.String[] types, int[] posIncrements)
-		{
-			Assert.IsNotNull(output);
-			Assert.IsTrue(ts.HasAttribute(typeof(TermAttribute)), "has TermAttribute");
-			TermAttribute termAtt = (TermAttribute) ts.GetAttribute(typeof(TermAttribute));
-			
-			OffsetAttribute offsetAtt = null;
-			if (startOffsets != null || endOffsets != null)
-			{
-				Assert.IsTrue(ts.HasAttribute(typeof(OffsetAttribute)), "has OffsetAttribute");
-				offsetAtt = (OffsetAttribute) ts.GetAttribute(typeof(OffsetAttribute));
-			}
-			
-			TypeAttribute typeAtt = null;
-			if (types != null)
-			{
-				Assert.IsTrue(ts.HasAttribute(typeof(TypeAttribute)), "has TypeAttribute");
-				typeAtt = (TypeAttribute) ts.GetAttribute(typeof(TypeAttribute));
-			}
-			
-			PositionIncrementAttribute posIncrAtt = null;
-			if (posIncrements != null)
-			{
-				Assert.IsTrue(ts.HasAttribute(typeof(PositionIncrementAttribute)), "has PositionIncrementAttribute");
-				posIncrAtt = (PositionIncrementAttribute) ts.GetAttribute(typeof(PositionIncrementAttribute));
-			}
-			
-			ts.Reset();
-			for (int i = 0; i < output.Length; i++)
-			{
-				// extra safety to enforce, that the state is not preserved and also
-				// assign bogus values
-				ts.ClearAttributes();
-				termAtt.SetTermBuffer("bogusTerm");
-				if (offsetAtt != null)
-					offsetAtt.SetOffset(14584724, 24683243);
-				if (typeAtt != null)
-					typeAtt.SetType("bogusType");
-				if (posIncrAtt != null)
-					posIncrAtt.SetPositionIncrement(45987657);
-				
-				Assert.IsTrue(ts.IncrementToken(), "token " + i + " exists");
-				Assert.AreEqual(output[i], termAtt.Term(), "term " + i);
-				if (startOffsets != null)
-					Assert.AreEqual(startOffsets[i], offsetAtt.StartOffset(), "startOffset " + i);
-				if (endOffsets != null)
-					Assert.AreEqual(endOffsets[i], offsetAtt.EndOffset(), "endOffset " + i);
-				if (types != null)
-					Assert.AreEqual(types[i], typeAtt.Type(), "type " + i);
-				if (posIncrements != null)
-					Assert.AreEqual(posIncrements[i], posIncrAtt.GetPositionIncrement(), "posIncrement " + i);
-			}
-			Assert.IsFalse(ts.IncrementToken(), "end of stream");
-			ts.End();
-			ts.Close();
-		}
-		
-		public static void  AssertTokenStreamContents(TokenStream ts, System.String[] output)
-		{
-			AssertTokenStreamContents(ts, output, null, null, null, null);
-		}
-		
-		public static void  AssertTokenStreamContents(TokenStream ts, System.String[] output, System.String[] types)
-		{
-			AssertTokenStreamContents(ts, output, null, null, types, null);
-		}
-		
-		public static void  AssertTokenStreamContents(TokenStream ts, System.String[] output, int[] posIncrements)
-		{
-			AssertTokenStreamContents(ts, output, null, null, null, posIncrements);
-		}
-		
-		public static void  AssertTokenStreamContents(TokenStream ts, System.String[] output, int[] startOffsets, int[] endOffsets)
-		{
-			AssertTokenStreamContents(ts, output, startOffsets, endOffsets, null, null);
-		}
-		
-		public static void  AssertTokenStreamContents(TokenStream ts, System.String[] output, int[] startOffsets, int[] endOffsets, int[] posIncrements)
-		{
-			AssertTokenStreamContents(ts, output, startOffsets, endOffsets, null, posIncrements);
-		}
-		
-		
-		public static void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] output, int[] startOffsets, int[] endOffsets, System.String[] types, int[] posIncrements)
-		{
-			AssertTokenStreamContents(a.TokenStream("dummy", new System.IO.StringReader(input)), output, startOffsets, endOffsets, types, posIncrements);
-		}
-		
-		public static void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] output)
-		{
-			AssertAnalyzesTo(a, input, output, null, null, null, null);
-		}
-		
-		public static void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] output, System.String[] types)
-		{
-			AssertAnalyzesTo(a, input, output, null, null, types, null);
-		}
-		
-		public static void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] output, int[] posIncrements)
-		{
-			AssertAnalyzesTo(a, input, output, null, null, null, posIncrements);
-		}
-		
-		public static void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] output, int[] startOffsets, int[] endOffsets)
-		{
-			AssertAnalyzesTo(a, input, output, startOffsets, endOffsets, null, null);
-		}
-		
-		public static void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] output, int[] startOffsets, int[] endOffsets, int[] posIncrements)
-		{
-			AssertAnalyzesTo(a, input, output, startOffsets, endOffsets, null, posIncrements);
-		}
-		
-		
-		public static void  AssertAnalyzesToReuse(Analyzer a, System.String input, System.String[] output, int[] startOffsets, int[] endOffsets, System.String[] types, int[] posIncrements)
-		{
-			AssertTokenStreamContents(a.ReusableTokenStream("dummy", new System.IO.StringReader(input)), output, startOffsets, endOffsets, types, posIncrements);
-		}
-		
-		public static void  AssertAnalyzesToReuse(Analyzer a, System.String input, System.String[] output)
-		{
-			AssertAnalyzesToReuse(a, input, output, null, null, null, null);
-		}
-		
-		public static void  AssertAnalyzesToReuse(Analyzer a, System.String input, System.String[] output, System.String[] types)
-		{
-			AssertAnalyzesToReuse(a, input, output, null, null, types, null);
-		}
-		
-		public static void  AssertAnalyzesToReuse(Analyzer a, System.String input, System.String[] output, int[] posIncrements)
-		{
-			AssertAnalyzesToReuse(a, input, output, null, null, null, posIncrements);
-		}
-		
-		public static void  AssertAnalyzesToReuse(Analyzer a, System.String input, System.String[] output, int[] startOffsets, int[] endOffsets)
-		{
-			AssertAnalyzesToReuse(a, input, output, startOffsets, endOffsets, null, null);
-		}
-		
-		public static void  AssertAnalyzesToReuse(Analyzer a, System.String input, System.String[] output, int[] startOffsets, int[] endOffsets, int[] posIncrements)
-		{
-			AssertAnalyzesToReuse(a, input, output, startOffsets, endOffsets, null, posIncrements);
-		}
-		
+
+        public interface CheckClearAttributesAttribute : Lucene.Net.Util.Attribute
+        {
+               bool GetAndResetClearCalled();
+        }
+
+        public class CheckClearAttributesAttributeImpl : Lucene.Net.Util.AttributeImpl ,CheckClearAttributesAttribute 
+        {
+            private bool clearCalled = false;
+
+            public bool GetAndResetClearCalled()
+            {
+                try
+                {
+                    return clearCalled;
+                }
+                finally
+                {
+                    clearCalled = false;
+                }
+            }
+
+            //@Override
+            public override void Clear()
+            {
+                clearCalled = true;
+            }
+
+            //@Override
+            public  override bool Equals(Object other) 
+            {
+                return (
+                other is CheckClearAttributesAttributeImpl &&
+                ((CheckClearAttributesAttributeImpl) other).clearCalled == this.clearCalled
+                );
+            }
+
+
+            //@Override
+            public override int GetHashCode()
+            {
+                //Java: return 76137213 ^ Boolean.valueOf(clearCalled).hashCode();
+                return 76137213 ^ clearCalled.GetHashCode();
+            }
+
+            //@Override
+            public override void CopyTo(Lucene.Net.Util.AttributeImpl target)
+            {
+                ((CheckClearAttributesAttributeImpl)target).Clear();
+            }
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, System.String[] output, int[] startOffsets, int[] endOffsets, System.String[] types, int[] posIncrements, int? finalOffset)
+        {
+            Assert.IsNotNull(output);
+            CheckClearAttributesAttribute checkClearAtt = (CheckClearAttributesAttribute)ts.AddAttribute(typeof(CheckClearAttributesAttribute));
+
+            Assert.IsTrue(ts.HasAttribute(typeof(TermAttribute)), "has no TermAttribute");
+            TermAttribute termAtt = (TermAttribute)ts.GetAttribute(typeof(TermAttribute));
+
+            OffsetAttribute offsetAtt = null;
+            if (startOffsets != null || endOffsets != null || finalOffset != null)
+            {
+                Assert.IsTrue(ts.HasAttribute(typeof(OffsetAttribute)), "has no OffsetAttribute");
+                offsetAtt = (OffsetAttribute)ts.GetAttribute(typeof(OffsetAttribute));
+            }
+    
+            TypeAttribute typeAtt = null;
+            if (types != null)
+            {
+                Assert.IsTrue(ts.HasAttribute(typeof(TypeAttribute)), "has no TypeAttribute");
+                typeAtt = (TypeAttribute)ts.GetAttribute(typeof(TypeAttribute));
+            }
+            
+            PositionIncrementAttribute posIncrAtt = null;
+            if (posIncrements != null)
+            {
+                Assert.IsTrue(ts.HasAttribute(typeof(PositionIncrementAttribute)), "has no PositionIncrementAttribute");
+                posIncrAtt = (PositionIncrementAttribute)ts.GetAttribute(typeof(PositionIncrementAttribute));
+            }
+
+            ts.Reset();
+            for (int i = 0; i < output.Length; i++)
+            {
+                // extra safety to enforce, that the state is not preserved and also assign bogus values
+                ts.ClearAttributes();
+                termAtt.SetTermBuffer("bogusTerm");
+                if (offsetAtt != null) offsetAtt.SetOffset(14584724, 24683243);
+                if (typeAtt != null) typeAtt.SetType("bogusType");
+                if (posIncrAtt != null) posIncrAtt.SetPositionIncrement(45987657);
+
+                checkClearAtt.GetAndResetClearCalled(); // reset it, because we called ClearAttributes() before
+                Assert.IsTrue(ts.IncrementToken(), "token " + i + " does not exist");
+                Assert.IsTrue(checkClearAtt.GetAndResetClearCalled(), "clearAttributes() was not called correctly in TokenStream chain");
+
+                Assert.AreEqual(output[i], termAtt.Term(), "term " + i);
+                if (startOffsets != null)
+                    Assert.AreEqual(startOffsets[i], offsetAtt.StartOffset(), "startOffset " + i);
+                if (endOffsets != null)
+                    Assert.AreEqual(endOffsets[i], offsetAtt.EndOffset(), "endOffset " + i);
+                if (types != null)
+                    Assert.AreEqual(types[i], typeAtt.Type(), "type " + i);
+                if (posIncrements != null)
+                    Assert.AreEqual(posIncrements[i], posIncrAtt.GetPositionIncrement(), "posIncrement " + i);
+            }
+            Assert.IsFalse(ts.IncrementToken(), "end of stream");
+            ts.End();
+            if (finalOffset.HasValue)
+                Assert.AreEqual(finalOffset, offsetAtt.EndOffset(), "finalOffset ");
+            ts.Close();
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output, int[] startOffsets, int[] endOffsets, String[] types, int[] posIncrements)
+        {
+            AssertTokenStreamContents(ts, output, startOffsets, endOffsets, types, posIncrements, null);
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output)
+        {
+            AssertTokenStreamContents(ts, output, null, null, null, null, null);
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output, String[] types)
+        {
+            AssertTokenStreamContents(ts, output, null, null, types, null, null);
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output, int[] posIncrements)
+        {
+            AssertTokenStreamContents(ts, output, null, null, null, posIncrements, null);
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output, int[] startOffsets, int[] endOffsets)
+        {
+            AssertTokenStreamContents(ts, output, startOffsets, endOffsets, null, null, null);
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output, int[] startOffsets, int[] endOffsets, int? finalOffset)
+        {
+            AssertTokenStreamContents(ts, output, startOffsets, endOffsets, null, null, finalOffset);
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output, int[] startOffsets, int[] endOffsets, int[] posIncrements)
+        {
+            AssertTokenStreamContents(ts, output, startOffsets, endOffsets, null, posIncrements, null);
+        }
+
+        public static void AssertTokenStreamContents(TokenStream ts, String[] output, int[] startOffsets, int[] endOffsets, int[] posIncrements, int? finalOffset)
+        {
+            AssertTokenStreamContents(ts, output, startOffsets, endOffsets, null, posIncrements, finalOffset);
+        }
+
+        public static void AssertAnalyzesTo(Analyzer a, String input, String[] output, int[] startOffsets, int[] endOffsets, String[] types, int[] posIncrements)
+        {
+            AssertTokenStreamContents(a.TokenStream("dummy", new System.IO.StringReader(input)), output, startOffsets, endOffsets, types, posIncrements, input.Length);
+        }
+
+        public static void AssertAnalyzesTo(Analyzer a, String input, String[] output)
+        {
+            AssertAnalyzesTo(a, input, output, null, null, null, null);
+        }
+
+        public static void AssertAnalyzesTo(Analyzer a, String input, String[] output, String[] types)
+        {
+            AssertAnalyzesTo(a, input, output, null, null, types, null);
+        }
+
+        public static void AssertAnalyzesTo(Analyzer a, String input, String[] output, int[] posIncrements)
+        {
+            AssertAnalyzesTo(a, input, output, null, null, null, posIncrements);
+        }
+
+        public static void AssertAnalyzesTo(Analyzer a, String input, String[] output, int[] startOffsets, int[] endOffsets)
+        {
+            AssertAnalyzesTo(a, input, output, startOffsets, endOffsets, null, null);
+        }
+
+        public static void AssertAnalyzesTo(Analyzer a, String input, String[] output, int[] startOffsets, int[] endOffsets, int[] posIncrements)
+        {
+            AssertAnalyzesTo(a, input, output, startOffsets, endOffsets, null, posIncrements);
+        }
+
+
+        public static void AssertAnalyzesToReuse(Analyzer a, String input, String[] output, int[] startOffsets, int[] endOffsets, String[] types, int[] posIncrements)
+        {
+            AssertTokenStreamContents(a.ReusableTokenStream("dummy", new System.IO.StringReader(input)), output, startOffsets, endOffsets, types, posIncrements, input.Length);
+        }
+
+        public static void AssertAnalyzesToReuse(Analyzer a, String input, String[] output)
+        {
+            AssertAnalyzesToReuse(a, input, output, null, null, null, null);
+        }
+
+        public static void AssertAnalyzesToReuse(Analyzer a, String input, String[] output, String[] types)
+        {
+            AssertAnalyzesToReuse(a, input, output, null, null, types, null);
+        }
+
+        public static void AssertAnalyzesToReuse(Analyzer a, String input, String[] output, int[] posIncrements)
+        {
+            AssertAnalyzesToReuse(a, input, output, null, null, null, posIncrements);
+        }
+
+        public static void AssertAnalyzesToReuse(Analyzer a, String input, String[] output, int[] startOffsets, int[] endOffsets)
+        {
+            AssertAnalyzesToReuse(a, input, output, startOffsets, endOffsets, null, null);
+        }
+
+        public static void AssertAnalyzesToReuse(Analyzer a, String input, String[] output, int[] startOffsets, int[] endOffsets, int[] posIncrements)
+        {
+            AssertAnalyzesToReuse(a, input, output, startOffsets, endOffsets, null, posIncrements);
+        }
+
 		// simple utility method for testing stemmers
 		
 		public static void  CheckOneTerm(Analyzer a, System.String input, System.String expected)

Modified: lucene/lucene.net/trunk/C#/src/Test/Analysis/TestCachingTokenFilter.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Analysis/TestCachingTokenFilter.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Analysis/TestCachingTokenFilter.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Analysis/TestCachingTokenFilter.cs Wed Feb 17 19:33:03 2010
@@ -70,6 +70,7 @@
 				}
 				else
 				{
+                    ClearAttributes();
 					termAtt.SetTermBuffer(Enclosing_Instance.tokens[index++]);
 					offsetAtt.SetOffset(0, 0);
 					return true;

Modified: lucene/lucene.net/trunk/C#/src/Test/Analysis/TestTeeSinkTokenFilter.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Analysis/TestTeeSinkTokenFilter.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Analysis/TestTeeSinkTokenFilter.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Analysis/TestTeeSinkTokenFilter.cs Wed Feb 17 19:33:03 2010
@@ -93,32 +93,13 @@
 			TeeSinkTokenFilter source = new TeeSinkTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer1.ToString())));
 			TokenStream sink1 = source.NewSinkTokenStream();
 			TokenStream sink2 = source.NewSinkTokenStream(theFilter);
-			int i = 0;
-			TermAttribute termAtt = (TermAttribute) source.GetAttribute(typeof(TermAttribute));
-			while (source.IncrementToken())
-			{
-				Assert.AreEqual(tokens1[i], termAtt.Term());
-				i++;
-			}
-			Assert.AreEqual(tokens1.Length, i);
-			
-			i = 0;
-			termAtt = (TermAttribute) sink1.GetAttribute(typeof(TermAttribute));
-			while (sink1.IncrementToken())
-			{
-				Assert.AreEqual(tokens1[i], termAtt.Term());
-				i++;
-			}
-			Assert.AreEqual(tokens1.Length, i);
-			
-			i = 0;
-			termAtt = (TermAttribute) sink2.GetAttribute(typeof(TermAttribute));
-			while (sink2.IncrementToken())
-			{
-				Assert.IsTrue(termAtt.Term().ToUpper().Equals("The".ToUpper()));
-				i++;
-			}
-			Assert.AreEqual(2, i, "there should be two times 'the' in the stream");
+
+            source.AddAttribute(typeof(CheckClearAttributesAttribute));
+            sink1.AddAttribute(typeof(CheckClearAttributesAttribute));
+            sink2.AddAttribute(typeof(CheckClearAttributesAttribute));
+    
+            AssertTokenStreamContents(source, tokens1);
+            AssertTokenStreamContents(sink1, tokens1);
 		}
 		
 		[Test]
@@ -129,54 +110,28 @@
 			TeeSinkTokenFilter.SinkTokenStream theDetector = tee1.NewSinkTokenStream(theFilter);
 			TokenStream source1 = new CachingTokenFilter(tee1);
 			
+             
+            tee1.AddAttribute(typeof(CheckClearAttributesAttribute));
+            dogDetector.AddAttribute(typeof(CheckClearAttributesAttribute));
+            theDetector.AddAttribute(typeof(CheckClearAttributesAttribute));
+
+
 			TeeSinkTokenFilter tee2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer2.ToString())));
 			tee2.AddSinkTokenStream(dogDetector);
 			tee2.AddSinkTokenStream(theDetector);
 			TokenStream source2 = tee2;
-			
-			int i = 0;
-			TermAttribute termAtt = (TermAttribute) source1.GetAttribute(typeof(TermAttribute));
-			while (source1.IncrementToken())
-			{
-				Assert.AreEqual(tokens1[i], termAtt.Term());
-				i++;
-			}
-			Assert.AreEqual(tokens1.Length, i);
-			i = 0;
-			termAtt = (TermAttribute) source2.GetAttribute(typeof(TermAttribute));
-			while (source2.IncrementToken())
-			{
-				Assert.AreEqual(tokens2[i], termAtt.Term());
-				i++;
-			}
-			Assert.AreEqual(tokens2.Length, i);
-			i = 0;
-			termAtt = (TermAttribute) theDetector.GetAttribute(typeof(TermAttribute));
-			while (theDetector.IncrementToken())
-			{
-				Assert.IsTrue(termAtt.Term().ToUpper().Equals("The".ToUpper()), "'" + termAtt.Term() + "' is not equal to 'The'");
-				i++;
-			}
-			Assert.AreEqual(4, i, "there must be 4 times 'The' in the stream");
-			i = 0;
-			termAtt = (TermAttribute) dogDetector.GetAttribute(typeof(TermAttribute));
-			while (dogDetector.IncrementToken())
-			{
-				Assert.IsTrue(termAtt.Term().ToUpper().Equals("Dogs".ToUpper()), "'" + termAtt.Term() + "' is not equal to 'Dogs'");
-				i++;
-			}
-			Assert.AreEqual(2, i, "there must be 2 times 'Dog' in the stream");
-			
+
+            AssertTokenStreamContents(source1, tokens1);
+            AssertTokenStreamContents(source2, tokens2);
+
+            AssertTokenStreamContents(theDetector, new String[] { "The", "the", "The", "the" });
+            			
 			source1.Reset();
 			TokenStream lowerCasing = new LowerCaseFilter(source1);
-			i = 0;
-			termAtt = (TermAttribute) lowerCasing.GetAttribute(typeof(TermAttribute));
-			while (lowerCasing.IncrementToken())
-			{
-				Assert.AreEqual(tokens1[i].ToLower(), termAtt.Term());
-				i++;
-			}
-			Assert.AreEqual(i, tokens1.Length);
+            String[] lowerCaseTokens = new String[tokens1.Length];
+            for (int i = 0; i < tokens1.Length; i++)
+                lowerCaseTokens[i] = tokens1[i].ToLower();
+
 		}
 		
 		/// <summary> Not an explicit test, just useful to print out some info on performance

Modified: lucene/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/AssemblyInfo.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs Wed Feb 17 19:33:03 2010
@@ -16,7 +16,7 @@
 [assembly: AssemblyDefaultAlias("Lucene.Net")]
 [assembly: AssemblyCulture("")]
 
-[assembly: AssemblyInformationalVersionAttribute("2.9.1")]
+[assembly: AssemblyInformationalVersionAttribute("2.9.2")]
 
 //
 // Version information for an assembly consists of the following four values:
@@ -29,7 +29,7 @@
 // You can specify all the values or you can default the Revision and Build Numbers 
 // by using the '*' as shown below:
 
-[assembly: AssemblyVersion("2.9.1.002")]
+[assembly: AssemblyVersion("2.9.2.001")]
 
 //
 // In order to sign your assembly you must specify a key to use. Refer to the 

Modified: lucene/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Index/TestDocumentWriter.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs Wed Feb 17 19:33:03 2010
@@ -192,6 +192,7 @@
 				}
 				else
 				{
+                    ClearAttributes();
 					termAtt.SetTermBuffer(tokens[index++]);
 					return true;
 				}

Modified: lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriter.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriter.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriter.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriter.cs Wed Feb 17 19:33:03 2010
@@ -5699,5 +5699,122 @@
 			w.Close();
 			d.Close();
 		}
+
+        [Test]
+        public void TestEmbeddedFFFF()
+        {
+
+            Directory d = new MockRAMDirectory();
+            IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+            Document doc = new Document();
+            doc.Add(new Field("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED));
+            w.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new Field("field", "a", Field.Store.NO, Field.Index.ANALYZED));
+            w.AddDocument(doc);
+            w.Close();
+
+            _TestUtil.CheckIndex(d);
+            d.Close();
+        }
+
+        [Test]
+        public void TestNoDocsIndex()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+            writer.SetUseCompoundFile(false);
+            //ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
+            //writer.SetInfoStream(new PrintStream(bos));
+            writer.AddDocument(new Document());
+            writer.Close();
+
+            _TestUtil.CheckIndex(dir);
+            dir.Close();
+        }
+
+        class LUCENE_2095_Thread : SupportClass.ThreadClass
+        {
+            IndexWriter w = null;
+            Directory dir = null;
+            long endTime = 0;
+            System.Collections.IList failed = null;
+            int finalI = 0;
+
+            public LUCENE_2095_Thread(IndexWriter w, long endTime, Directory dir, System.Collections.IList failed, int finalI)
+            {
+                this.w = w;
+                this.dir = dir;
+                this.endTime = endTime;
+                this.failed = failed;
+                this.finalI = finalI;
+            }
+
+            override public void Run()
+            {
+                try
+                {
+                    Document doc = new Document();
+                    IndexReader r = IndexReader.Open(dir);
+                    Field f = new Field("f", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+                    doc.Add(f);
+                    int count = 0;
+                    while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime && failed.Count == 0)
+                    {
+                        for (int j = 0; j < 10; j++)
+                        {
+                            String s = finalI + "_" + (count++).ToString();
+                            f.SetValue(s);
+                            w.AddDocument(doc);
+                            w.Commit();
+                            IndexReader r2 = r.Reopen();
+                            Assert.IsTrue(r2 != r);
+                            r.Close();
+                            r = r2;
+                            Assert.AreEqual(1, r.DocFreq(new Term("f", s)), "term=f:" + s);
+                        }
+                    }
+                    r.Close();
+                }
+                catch (Exception t)
+                {
+                    lock (failed)
+                    {
+                        failed.Add(this);
+                    }
+                    throw t;
+                }
+            }
+        }
+
+        // LUCENE-2095: make sure with multiple threads commit
+        // doesn't return until all changes are in fact in the
+        // index
+        [Test]
+        public void TestCommitThreadSafety()
+        {
+            int NUM_THREADS = 5;
+            double RUN_SEC = 0.5;
+            Directory dir = new MockRAMDirectory();
+            IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+            w.Commit();
+            System.Collections.IList failed = new System.Collections.ArrayList();
+            LUCENE_2095_Thread[] threads = new LUCENE_2095_Thread[NUM_THREADS];
+            long endTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + ((long)(RUN_SEC * 1000));
+            for (int i = 0; i < NUM_THREADS; i++)
+            {
+                int finalI = i;
+
+                threads[i] = new LUCENE_2095_Thread(w, endTime, dir, failed, finalI);
+                threads[i].Start();
+            }
+            for (int i = 0; i < NUM_THREADS; i++)
+            {
+                threads[i].Join();
+            }
+            w.Close();
+            dir.Close();
+            Assert.AreEqual(0, failed.Count);
+        }
 	}
 }
\ No newline at end of file

Modified: lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterReader.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Index/TestIndexWriterReader.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterReader.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterReader.cs Wed Feb 17 19:33:03 2010
@@ -1084,5 +1084,37 @@
 			r.Close();
 			dir.Close();
 		}
+
+        [Test]
+        public void TestDeletesNumDocs()
+        {
+            Directory dir = new MockRAMDirectory();
+            IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
+                                                       IndexWriter.MaxFieldLength.LIMITED);
+            Document doc = new Document();
+            doc.Add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
+            Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
+            doc.Add(id);
+            id.SetValue("0");
+            w.AddDocument(doc);
+            id.SetValue("1");
+            w.AddDocument(doc);
+            IndexReader r = w.GetReader();
+            Assert.AreEqual(2, r.NumDocs());
+            r.Close();
+
+            w.DeleteDocuments(new Term("id", "0"));
+            r = w.GetReader();
+            Assert.AreEqual(1, r.NumDocs());
+            r.Close();
+
+            w.DeleteDocuments(new Term("id", "1"));
+            r = w.GetReader();
+            Assert.AreEqual(0, r.NumDocs());
+            r.Close();
+
+            w.Close();
+            dir.Close();
+        }
 	}
 }
\ No newline at end of file

Modified: lucene/lucene.net/trunk/C#/src/Test/Index/TestPayloads.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Index/TestPayloads.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Index/TestPayloads.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Index/TestPayloads.cs Wed Feb 17 19:33:03 2010
@@ -635,6 +635,7 @@
 				if (!first)
 					return false;
 				first = false;
+                ClearAttributes();
 				termAtt.SetTermBuffer(term);
 				payloadAtt.SetPayload(new Payload(payload));
 				return true;

Modified: lucene/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Index/TestTermVectorsReader.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs Wed Feb 17 19:33:03 2010
@@ -190,6 +190,7 @@
 				else
 				{
 					TestToken testToken = Enclosing_Instance.tokens[tokenUpto++];
+                    ClearAttributes();
 					termAtt.SetTermBuffer(testToken.text);
 					offsetAtt.SetOffset(testToken.startOffset, testToken.endOffset);
 					if (tokenUpto > 1)

Modified: lucene/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Index/TestTermdocPerf.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs Wed Feb 17 19:33:03 2010
@@ -51,6 +51,7 @@
 			num--;
 			if (num >= 0)
 			{
+                ClearAttributes();
 				termAtt.SetTermBuffer(value_Renamed);
 				return true;
 			}

Modified: lucene/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/QueryParser/TestQueryParser.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs Wed Feb 17 19:33:03 2010
@@ -142,6 +142,7 @@
 				if (inPhrase)
 				{
 					inPhrase = false;
+                    ClearAttributes();
 					termAtt.SetTermBuffer("phrase2");
 					offsetAtt.SetOffset(savedStart, savedEnd);
 					return true;

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/Function/FunctionTestSetup.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs Wed Feb 17 19:33:03 2010
@@ -57,9 +57,18 @@
 		protected internal Analyzer anlzr;
 		
 		/* @override constructor */
-		public FunctionTestSetup(System.String name):base(name)
+		public FunctionTestSetup(System.String name):this(name, false)
 		{
 		}
+        
+        private bool doMultiSegment;
+
+        public FunctionTestSetup(String name, bool doMultiSegment) : base(name)
+        {
+            this.doMultiSegment = doMultiSegment;
+        }
+
+
         public FunctionTestSetup()
             : base()
         {
@@ -96,6 +105,10 @@
 				AddDoc(iw, i);
 				done[i] = true;
 				i = (i + 4) % N_DOCS;
+                if (doMultiSegment && remaining % 3 == 0) 
+                {
+                    iw.Commit();
+                }
 				remaining--;
 			}
 			iw.Close();

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestCustomScoreQuery.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/Function/TestCustomScoreQuery.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestCustomScoreQuery.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestCustomScoreQuery.cs Wed Feb 17 19:33:03 2010
@@ -27,6 +27,9 @@
 using QueryUtils = Lucene.Net.Search.QueryUtils;
 using TopDocs = Lucene.Net.Search.TopDocs;
 
+using Lucene.Net.Search;
+using Lucene.Net.Index;
+
 namespace Lucene.Net.Search.Function
 {
 	
@@ -36,7 +39,7 @@
 	{
 		
 		/* @override constructor */
-		public TestCustomScoreQuery(System.String name):base(name)
+		public TestCustomScoreQuery(System.String name):base(name, true)
 		{
 		}
         public TestCustomScoreQuery()
@@ -76,7 +79,7 @@
 			// INT field can be parsed as float
 			DoTestCustomScore(INT_FIELD, FieldScoreQuery.Type.FLOAT, 1.0);
 			DoTestCustomScore(INT_FIELD, FieldScoreQuery.Type.FLOAT, 5.0);
-			// same values, but in flot format
+			// same values, but in float format
 			DoTestCustomScore(FLOAT_FIELD, FieldScoreQuery.Type.FLOAT, 1.0);
 			DoTestCustomScore(FLOAT_FIELD, FieldScoreQuery.Type.FLOAT, 6.0);
 		}
@@ -136,6 +139,8 @@
 				if (valSrcScores.Length == 1)
 				{
 					return subQueryScore + valSrcScores[0];
+                    // confirm that skipping beyond the last doc, on the
+                    // previous reader, hits NO_MORE_DOCS
 				}
 				return (subQueryScore + valSrcScores[0]) * valSrcScores[1]; // we know there are two
 			}
@@ -160,6 +165,50 @@
 				return exp2;
 			}
 		}
+
+        private class CustomExternalQuery : CustomScoreQuery 
+        {
+            private IndexReader reader;
+            private int[] values;
+
+            public override float CustomScore(int doc, float subScore, float valSrcScore) 
+            {
+                Assert.IsTrue(doc <= reader.MaxDoc());
+                return (float) values[doc];
+            }
+
+            public override void SetNextReader(IndexReader r)
+            {
+                reader = r;
+                values = FieldCache_Fields.DEFAULT.GetInts(r, INT_FIELD);
+            }
+
+            public CustomExternalQuery(Query q) : base(q)
+            {  }
+        }
+
+        [Test]
+        public void TestCustomExternalQuery() 
+        {
+            QueryParser qp = new QueryParser(TEXT_FIELD,anlzr); 
+            String qtxt = "first aid text"; // from the doc texts in FunctionQuerySetup.
+            Query q1 = qp.Parse(qtxt); 
+        
+            Query q = new CustomExternalQuery(q1);
+            Log(q);
+
+            IndexSearcher s = new IndexSearcher(dir);
+            TopDocs hits = s.Search(q, 1000);
+            Assert.AreEqual(N_DOCS, hits.totalHits);
+            for(int i=0;i<N_DOCS;i++) 
+            {
+                int doc = hits.scoreDocs[i].doc;
+                float score = hits.scoreDocs[i].score;
+                Assert.AreEqual(score, (float)1 + (4 * doc) % N_DOCS, 0.0001, "doc=" + doc);
+            }
+            s.Close();
+        }
+
 		
 		// Test that FieldScoreQuery returns docs with expected score.
 		private void  DoTestCustomScore(System.String field, FieldScoreQuery.Type tp, double dboost)

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestFieldScoreQuery.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/Function/TestFieldScoreQuery.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestFieldScoreQuery.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestFieldScoreQuery.cs Wed Feb 17 19:33:03 2010
@@ -45,7 +45,7 @@
 	{
 		
 		/* @override constructor */
-		public TestFieldScoreQuery(System.String name):base(name)
+		public TestFieldScoreQuery(System.String name):base(name, true)
 		{
 		}
         public TestFieldScoreQuery()
@@ -200,7 +200,7 @@
 			expectedArrayTypes[FieldScoreQuery.Type.FLOAT] = new float[0];
 			
 			IndexSearcher s = new IndexSearcher(dir);
-			System.Object innerArray = null;
+			System.Object[] innerArray = new Object[s.GetIndexReader().GetSequentialSubReaders().Length];
 			
 			bool warned = false; // print warning once.
 			for (int i = 0; i < 10; i++)
@@ -216,14 +216,14 @@
 					{
 						if (i == 0)
 						{
-							innerArray = q.valSrc_ForNUnit.GetValues(reader).GetInnerArray();
-							Log(i + ".  compare: " + innerArray.GetType() + " to " + expectedArrayTypes[tp].GetType());
-							Assert.AreEqual(innerArray.GetType(), expectedArrayTypes[tp].GetType(), "field values should be cached in the correct array type!");
+							innerArray[j] = q.valSrc_ForNUnit.GetValues(reader).GetInnerArray();
+							Log(i + ".  compare: " + innerArray[j].GetType() + " to " + expectedArrayTypes[tp].GetType());
+							Assert.AreEqual(innerArray[j].GetType(), expectedArrayTypes[tp].GetType(), "field values should be cached in the correct array type!");
 						}
 						else
 						{
-							Log(i + ".  compare: " + innerArray + " to " + q.valSrc_ForNUnit.GetValues(reader).GetInnerArray());
-							Assert.AreSame(innerArray, q.valSrc_ForNUnit.GetValues(reader).GetInnerArray(), "field values should be cached and reused!");
+							Log(i + ".  compare: " + innerArray[j] + " to " + q.valSrc_ForNUnit.GetValues(reader).GetInnerArray());
+							Assert.AreSame(innerArray[j], q.valSrc_ForNUnit.GetValues(reader).GetInnerArray(), "field values should be cached and reused!");
 						}
 					}
 					catch (System.NotSupportedException e)

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestOrdValues.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/Function/TestOrdValues.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestOrdValues.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestOrdValues.cs Wed Feb 17 19:33:03 2010
@@ -45,7 +45,7 @@
 	{
 		
 		/* @override constructor */
-		public TestOrdValues(System.String name):base(name)
+		public TestOrdValues(System.String name):base(name, false)
 		{
 		}
         public TestOrdValues()

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/QueryUtils.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs Wed Feb 17 19:33:03 2010
@@ -117,16 +117,17 @@
 		}
 		private class AnonymousClassCollector1:Collector
 		{
-			public AnonymousClassCollector1(int[] lastDoc, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s, float maxDiff)
+			public AnonymousClassCollector1(int[] lastDoc, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s, float maxDiff, IndexReader[] lastReader)
 			{
-				InitBlock(lastDoc, q, s, maxDiff);
+				InitBlock(lastDoc, q, s, maxDiff, lastReader);
 			}
-			private void  InitBlock(int[] lastDoc, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s, float maxDiff)
+			private void  InitBlock(int[] lastDoc, Lucene.Net.Search.Query q, Lucene.Net.Search.IndexSearcher s, float maxDiff, IndexReader[] lastReader)
 			{
 				this.lastDoc = lastDoc;
 				this.q = q;
 				this.s = s;
 				this.maxDiff = maxDiff;
+                this.lastReader = lastReader;
 			}
 			private int[] lastDoc;
 			private Lucene.Net.Search.Query q;
@@ -134,6 +135,8 @@
 			private float maxDiff;
 			private Scorer scorer;
 			private IndexReader reader;
+            private IndexReader[] lastReader;
+
 			public override void  SetScorer(Scorer scorer)
 			{
 				this.scorer = scorer;
@@ -164,9 +167,24 @@
 			}
 			public override void  SetNextReader(IndexReader reader, int docBase)
 			{
-				this.reader = reader;
-				lastDoc[0] = - 1;
-			}
+		        // confirm that skipping beyond the last doc, on the
+                // previous reader, hits NO_MORE_DOCS
+                if (lastReader[0] != null) 
+                {
+                    IndexReader previousReader = lastReader[0];
+                    Weight w = q.Weight(new IndexSearcher(previousReader));
+                    Scorer scorer = w.Scorer(previousReader, true, false);
+                    if (scorer != null)
+                    {
+                        bool more = scorer.Advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
+                        Assert.IsFalse(more, "query's last doc was " + lastDoc[0] + " but skipTo(" + (lastDoc[0] + 1) + ") got to " + scorer.DocID());
+                    }
+                }
+
+                this.reader = lastReader[0] = reader;
+                lastDoc[0] = -1;
+            }
+
 			public override bool AcceptsDocsOutOfOrder()
 			{
 				return false;
@@ -421,23 +439,22 @@
 			//System.out.println("checkFirstSkipTo: "+q);
             float maxDiff = 1e-4f; //{{Lucene.Net-2.9.1}}Intentional diversion from Java Lucene
 			int[] lastDoc = new int[]{- 1};
-			s.Search(q, new AnonymousClassCollector1(lastDoc, q, s, maxDiff));
-			
-			System.Collections.IList readerList = new System.Collections.ArrayList();
-			ReaderUtil.GatherSubReaders(readerList, s.GetIndexReader());
-			IndexReader[] readers = (IndexReader[])(new System.Collections.ArrayList(readerList).ToArray(typeof(IndexReader)));
-			for (int i = 0; i < readers.Length; i++)
-			{
-				IndexReader reader = readers[i];
-				Weight w = q.Weight(s);
-				Scorer scorer = w.Scorer(reader, true, false);
-				
+            IndexReader[] lastReader = {null};
+
+			s.Search(q, new AnonymousClassCollector1(lastDoc, q, s, maxDiff, lastReader));
+			
+			if(lastReader[0] != null)
+            {
+                // confirm that skipping beyond the last doc, on the
+                // previous reader, hits NO_MORE_DOCS
+                IndexReader previousReader = lastReader[0];
+                Weight w = q.Weight(new IndexSearcher(previousReader));
+                Scorer scorer = w.Scorer(previousReader, true, false);
+
 				if (scorer != null)
 				{
-					bool more = scorer.Advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
-					
-					if (more && lastDoc[0] != - 1)
-						Assert.IsFalse(more, "query's last doc was " + lastDoc[0] + " but skipTo(" + (lastDoc[0] + 1) + ") got to " + scorer.DocID());
+					bool more = scorer.Advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;					
+					Assert.IsFalse(more, "query's last doc was " + lastDoc[0] + " but skipTo(" + (lastDoc[0] + 1) + ") got to " + scorer.DocID());
 				}
 			}
 		}

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/TestFilteredSearch.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs Wed Feb 17 19:33:03 2010
@@ -28,6 +28,7 @@
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using OpenBitSet = Lucene.Net.Util.OpenBitSet;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Lucene.Net.Store;
 
 namespace Lucene.Net.Search
 {
@@ -37,62 +38,94 @@
     [TestFixture]
 	public class TestFilteredSearch:LuceneTestCase
 	{
-				
+
+        public TestFilteredSearch(): base("")
+        {
+        }
+
 		private const System.String FIELD = "category";
 		
 		[Test]
 		public virtual void  TestFilteredSearch_Renamed()
 		{
-			RAMDirectory directory = new RAMDirectory();
-			int[] filterBits = new int[]{1, 36};
-			Filter filter = new SimpleDocIdSetFilter(filterBits);
-			
-			
-			try
-			{
-				IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-				for (int i = 0; i < 60; i++)
-				{
-					//Simple docs
-					Document doc = new Document();
-					doc.Add(new Field(FIELD, System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
-					writer.AddDocument(doc);
-				}
-				writer.Close();
-				
-				BooleanQuery booleanQuery = new BooleanQuery();
-				booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), BooleanClause.Occur.SHOULD);
-				
-				
-				IndexSearcher indexSearcher = new IndexSearcher(directory);
-				ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).scoreDocs;
-				Assert.AreEqual(1, hits.Length, "Number of matched documents");
-			}
-			catch (System.IO.IOException e)
-			{
-				Assert.Fail(e.Message);
-			}
+            bool enforceSingleSegment = true;
+            RAMDirectory directory = new RAMDirectory();
+            int[] filterBits = { 1, 36 };
+            SimpleDocIdSetFilter filter = new SimpleDocIdSetFilter(filterBits);
+            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            SearchFiltered(writer, directory, filter, enforceSingleSegment);
+            // run the test on more than one segment
+            enforceSingleSegment = false;
+            // reset - it is stateful
+            filter.Reset();
+            writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+            // we index 60 docs - this will create 6 segments
+            writer.SetMaxBufferedDocs(10);
+            SearchFiltered(writer, directory, filter, enforceSingleSegment);
 		}
+
+
+        public void SearchFiltered(IndexWriter writer, Directory directory, Filter filter, bool optimize)
+        {
+            try
+            {
+                for (int i = 0; i < 60; i++)
+                {//Simple docs
+                    Document doc = new Document();
+                    doc.Add(new Field(FIELD, i.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
+                    writer.AddDocument(doc);
+                }
+                if (optimize)
+                    writer.Optimize();
+                writer.Close();
+
+                BooleanQuery booleanQuery = new BooleanQuery();
+                booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), BooleanClause.Occur.SHOULD);
+
+
+                IndexSearcher indexSearcher = new IndexSearcher(directory);
+                ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).scoreDocs;
+                Assert.AreEqual(1, hits.Length, "Number of matched documents");
+
+            }
+            catch (System.IO.IOException e)
+            {
+                Assert.Fail(e.Message);
+            }
+
+        }
 		
-		
-		[Serializable]
-		public sealed class SimpleDocIdSetFilter:Filter
-		{
-			private OpenBitSet bits;
-			
-			public SimpleDocIdSetFilter(int[] docs)
-			{
-				bits = new OpenBitSet();
-				for (int i = 0; i < docs.Length; i++)
-				{
-					bits.Set(docs[i]);
-				}
-			}
-			
-			public override DocIdSet GetDocIdSet(IndexReader reader)
-			{
-				return bits;
-			}
-		}
+
+        [Serializable]
+        public sealed class SimpleDocIdSetFilter : Filter
+        {
+            private int docBase;
+            private int[] docs;
+            private int index;
+            public SimpleDocIdSetFilter(int[] docs)
+            {
+                this.docs = docs;
+            }
+            public override DocIdSet GetDocIdSet(IndexReader reader)
+            {
+                OpenBitSet set = new OpenBitSet();
+                int limit = docBase + reader.MaxDoc();
+                for (; index < docs.Length; index++)
+                {
+                    int docId = docs[index];
+                    if (docId > limit)
+                        break;
+                    set.Set(docId - docBase);
+                }
+                docBase = limit;
+                return set.IsEmpty() ? null : set;
+            }
+
+            public void Reset()
+            {
+                index = 0;
+                docBase = 0;
+            }
+        }
 	}
 }
\ No newline at end of file

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/TestPositionIncrement.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs Wed Feb 17 19:33:03 2010
@@ -96,6 +96,7 @@
 				{
 					if (i == TOKENS.Length)
 						return false;
+                    ClearAttributes();
 					termAtt.SetTermBuffer(TOKENS[i]);
 					offsetAtt.SetOffset(i, i);
 					posIncrAtt.SetPositionIncrement(INCREMENTS[i]);

Modified: lucene/lucene.net/trunk/C#/src/Test/Search/TestTermRangeQuery.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Search/TestTermRangeQuery.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Search/TestTermRangeQuery.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Search/TestTermRangeQuery.cs Wed Feb 17 19:33:03 2010
@@ -276,6 +276,7 @@
 						return false;
 					else
 					{
+                        ClearAttributes();
 						done = true;
 						if (count == 1)
 						{

Modified: lucene/lucene.net/trunk/C#/src/Test/Store/TestRAMDirectory.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Store/TestRAMDirectory.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Store/TestRAMDirectory.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Store/TestRAMDirectory.cs Wed Feb 17 19:33:03 2010
@@ -81,10 +81,6 @@
 					{
 						throw new System.SystemException("", e);
 					}
-					lock (ramDir)
-					{
-						Assert.AreEqual(ramDir.SizeInBytes(), ramDir.GetRecomputedSizeInBytes());
-					}
 				}
 			}
 		}
@@ -205,7 +201,7 @@
 			searcher.Close();
 		}
 		
-		private int numThreads = 50;
+		private int numThreads = 10;
 		private int docsPerThread = 40;
 		
         [Test]

Modified: lucene/lucene.net/trunk/C#/src/Test/TestSupportClass.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/TestSupportClass.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/TestSupportClass.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/TestSupportClass.cs Wed Feb 17 19:33:03 2010
@@ -945,7 +945,7 @@
             {
                 LUCENENET_100_CreateIndex();
 
-                System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Tcp.TcpChannel(38085));
+                System.Runtime.Remoting.Channels.ChannelServices.RegisterChannel(new System.Runtime.Remoting.Channels.Tcp.TcpChannel(38087));
 
                 Lucene.Net.Search.IndexSearcher indexSearcher = new Lucene.Net.Search.IndexSearcher(LUCENENET_100_Dir);
                 System.Runtime.Remoting.RemotingServices.Marshal(indexSearcher, "Searcher");
@@ -973,7 +973,7 @@
         {
             try
             {
-                Lucene.Net.Search.Searchable s = (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"tcp://localhost:38085/Searcher");
+                Lucene.Net.Search.Searchable s = (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"tcp://localhost:38087/Searcher");
                 Lucene.Net.Search.MultiSearcher searcher = new Lucene.Net.Search.MultiSearcher(new Lucene.Net.Search.Searchable[] { s });
 
                 Lucene.Net.Search.Query q = new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("field1", "moon"));
@@ -981,7 +981,7 @@
                 Lucene.Net.Search.Sort sort = new Lucene.Net.Search.Sort();
                 sort.SetSort(new Lucene.Net.Search.SortField("field2", Lucene.Net.Search.SortField.INT));
 
-                Lucene.Net.Search.Hits h = searcher.Search(q, sort);
+                Lucene.Net.Search.TopDocs h = searcher.Search(q, null,100, sort);
             }
             catch (Exception ex)
             {

Modified: lucene/lucene.net/trunk/C#/src/Test/Util/TestAttributeSource.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Test/Util/TestAttributeSource.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Test/Util/TestAttributeSource.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Test/Util/TestAttributeSource.cs Wed Feb 17 19:33:03 2010
@@ -24,122 +24,144 @@
 
 namespace Lucene.Net.Util
 {
-	
+
     [TestFixture]
-	public class TestAttributeSource:LuceneTestCase
-	{
-		
+    public class TestAttributeSource : LuceneTestCase
+    {
+
         [Test]
-		public virtual void  TestCaptureState()
-		{
-			// init a first instance
-			AttributeSource src = new AttributeSource();
-			TermAttribute termAtt = (TermAttribute) src.AddAttribute(typeof(TermAttribute));
-			TypeAttribute typeAtt = (TypeAttribute) src.AddAttribute(typeof(TypeAttribute));
-			termAtt.SetTermBuffer("TestTerm");
-			typeAtt.SetType("TestType");
-			int hashCode = src.GetHashCode();
-			
-			AttributeSource.State state = src.CaptureState();
-			
-			// modify the attributes
-			termAtt.SetTermBuffer("AnotherTestTerm");
-			typeAtt.SetType("AnotherTestType");
-			Assert.IsTrue(hashCode != src.GetHashCode(), "Hash code should be different");
-			
-			src.RestoreState(state);
-			Assert.AreEqual("TestTerm", termAtt.Term());
-			Assert.AreEqual("TestType", typeAtt.Type());
-			Assert.AreEqual(hashCode, src.GetHashCode(), "Hash code should be equal after restore");
-			
-			// restore into an exact configured copy
-			AttributeSource copy = new AttributeSource();
-			copy.AddAttribute(typeof(TermAttribute));
-			copy.AddAttribute(typeof(TypeAttribute));
-			copy.RestoreState(state);
-			Assert.AreEqual(src.GetHashCode(), copy.GetHashCode(), "Both AttributeSources should have same hashCode after restore");
-			Assert.AreEqual(src, copy, "Both AttributeSources should be equal after restore");
-			
-			// init a second instance (with attributes in different order and one additional attribute)
-			AttributeSource src2 = new AttributeSource();
-			typeAtt = (TypeAttribute) src2.AddAttribute(typeof(TypeAttribute));
-			Lucene.Net.Analysis.Tokenattributes.FlagsAttribute flagsAtt = (Lucene.Net.Analysis.Tokenattributes.FlagsAttribute) src2.AddAttribute(typeof(Lucene.Net.Analysis.Tokenattributes.FlagsAttribute));
-			termAtt = (TermAttribute) src2.AddAttribute(typeof(TermAttribute));
-			flagsAtt.SetFlags(12345);
-			
-			src2.RestoreState(state);
-			Assert.AreEqual("TestTerm", termAtt.Term());
-			Assert.AreEqual("TestType", typeAtt.Type());
-			Assert.AreEqual(12345, flagsAtt.GetFlags(), "FlagsAttribute should not be touched");
-			
-			// init a third instance missing one Attribute
-			AttributeSource src3 = new AttributeSource();
-			termAtt = (TermAttribute) src3.AddAttribute(typeof(TermAttribute));
-			try
-			{
-				src3.RestoreState(state);
-				Assert.Fail("The third instance is missing the TypeAttribute, so restoreState() should throw IllegalArgumentException");
-			}
-			catch (System.ArgumentException iae)
-			{
-				// pass
-			}
-		}
-		
+        public virtual void TestCaptureState()
+        {
+            // init a first instance
+            AttributeSource src = new AttributeSource();
+            TermAttribute termAtt = (TermAttribute)src.AddAttribute(typeof(TermAttribute));
+            TypeAttribute typeAtt = (TypeAttribute)src.AddAttribute(typeof(TypeAttribute));
+            termAtt.SetTermBuffer("TestTerm");
+            typeAtt.SetType("TestType");
+            int hashCode = src.GetHashCode();
+
+            AttributeSource.State state = src.CaptureState();
+
+            // modify the attributes
+            termAtt.SetTermBuffer("AnotherTestTerm");
+            typeAtt.SetType("AnotherTestType");
+            Assert.IsTrue(hashCode != src.GetHashCode(), "Hash code should be different");
+
+            src.RestoreState(state);
+            Assert.AreEqual("TestTerm", termAtt.Term());
+            Assert.AreEqual("TestType", typeAtt.Type());
+            Assert.AreEqual(hashCode, src.GetHashCode(), "Hash code should be equal after restore");
+
+            // restore into an exact configured copy
+            AttributeSource copy = new AttributeSource();
+            copy.AddAttribute(typeof(TermAttribute));
+            copy.AddAttribute(typeof(TypeAttribute));
+            copy.RestoreState(state);
+            Assert.AreEqual(src.GetHashCode(), copy.GetHashCode(), "Both AttributeSources should have same hashCode after restore");
+            Assert.AreEqual(src, copy, "Both AttributeSources should be equal after restore");
+
+            // init a second instance (with attributes in different order and one additional attribute)
+            AttributeSource src2 = new AttributeSource();
+            typeAtt = (TypeAttribute)src2.AddAttribute(typeof(TypeAttribute));
+            Lucene.Net.Analysis.Tokenattributes.FlagsAttribute flagsAtt = (Lucene.Net.Analysis.Tokenattributes.FlagsAttribute)src2.AddAttribute(typeof(Lucene.Net.Analysis.Tokenattributes.FlagsAttribute));
+            termAtt = (TermAttribute)src2.AddAttribute(typeof(TermAttribute));
+            flagsAtt.SetFlags(12345);
+
+            src2.RestoreState(state);
+            Assert.AreEqual("TestTerm", termAtt.Term());
+            Assert.AreEqual("TestType", typeAtt.Type());
+            Assert.AreEqual(12345, flagsAtt.GetFlags(), "FlagsAttribute should not be touched");
+
+            // init a third instance missing one Attribute
+            AttributeSource src3 = new AttributeSource();
+            termAtt = (TermAttribute)src3.AddAttribute(typeof(TermAttribute));
+            try
+            {
+                src3.RestoreState(state);
+                Assert.Fail("The third instance is missing the TypeAttribute, so restoreState() should throw IllegalArgumentException");
+            }
+            catch (System.ArgumentException iae)
+            {
+                // pass
+            }
+        }
+
         [Test]
-		public virtual void  TestCloneAttributes()
-		{
-			AttributeSource src = new AttributeSource();
-			TermAttribute termAtt = (TermAttribute) src.AddAttribute(typeof(TermAttribute));
-			TypeAttribute typeAtt = (TypeAttribute) src.AddAttribute(typeof(TypeAttribute));
-			termAtt.SetTermBuffer("TestTerm");
-			typeAtt.SetType("TestType");
-			
-			AttributeSource clone = src.CloneAttributes();
-			System.Collections.IEnumerator it = clone.GetAttributeClassesIterator().GetEnumerator();
+        public virtual void TestCloneAttributes()
+        {
+            AttributeSource src = new AttributeSource();
+            TermAttribute termAtt = (TermAttribute)src.AddAttribute(typeof(TermAttribute));
+            TypeAttribute typeAtt = (TypeAttribute)src.AddAttribute(typeof(TypeAttribute));
+            termAtt.SetTermBuffer("TestTerm");
+            typeAtt.SetType("TestType");
+
+            AttributeSource clone = src.CloneAttributes();
+            System.Collections.IEnumerator it = clone.GetAttributeClassesIterator().GetEnumerator();
             Assert.IsTrue(it.MoveNext());
-			Assert.AreEqual(typeof(TermAttribute), it.Current, "TermAttribute must be the first attribute");
+            Assert.AreEqual(typeof(TermAttribute), it.Current, "TermAttribute must be the first attribute");
             Assert.IsTrue(it.MoveNext());
             Assert.AreEqual(typeof(TypeAttribute), it.Current, "TypeAttribute must be the second attribute");
-			Assert.IsFalse(it.MoveNext(), "No more attributes");
-			
-			TermAttribute termAtt2 = (TermAttribute)clone.GetAttribute(typeof(TermAttribute));
-			TypeAttribute typeAtt2 = (TypeAttribute)clone.GetAttribute(typeof(TypeAttribute));
-			Assert.IsFalse(ReferenceEquals(termAtt2, termAtt), "TermAttribute of original and clone must be different instances");
-			Assert.IsFalse(ReferenceEquals(typeAtt2, typeAtt), "TypeAttribute of original and clone must be different instances");
-			Assert.AreEqual(termAtt2, termAtt, "TermAttribute of original and clone must be equal");
-			Assert.AreEqual(typeAtt2, typeAtt, "TypeAttribute of original and clone must be equal");
-		}
-		
+            Assert.IsFalse(it.MoveNext(), "No more attributes");
+
+            TermAttribute termAtt2 = (TermAttribute)clone.GetAttribute(typeof(TermAttribute));
+            TypeAttribute typeAtt2 = (TypeAttribute)clone.GetAttribute(typeof(TypeAttribute));
+            Assert.IsFalse(ReferenceEquals(termAtt2, termAtt), "TermAttribute of original and clone must be different instances");
+            Assert.IsFalse(ReferenceEquals(typeAtt2, typeAtt), "TypeAttribute of original and clone must be different instances");
+            Assert.AreEqual(termAtt2, termAtt, "TermAttribute of original and clone must be equal");
+            Assert.AreEqual(typeAtt2, typeAtt, "TypeAttribute of original and clone must be equal");
+        }
+
         [Test]
-		public virtual void  TestToStringAndMultiAttributeImplementations()
-		{
-			AttributeSource src = new AttributeSource();
-			TermAttribute termAtt = (TermAttribute) src.AddAttribute(typeof(TermAttribute));
-			TypeAttribute typeAtt = (TypeAttribute) src.AddAttribute(typeof(TypeAttribute));
-			termAtt.SetTermBuffer("TestTerm");
-			typeAtt.SetType("TestType");
-			Assert.AreEqual("(" + termAtt.ToString() + "," + typeAtt.ToString() + ")", src.ToString(), "Attributes should appear in original order");
-			System.Collections.Generic.IEnumerator<AttributeImpl> it = src.GetAttributeImplsIterator().GetEnumerator();
-			Assert.IsTrue(it.MoveNext(), "Iterator should have 2 attributes left");
-			Assert.AreSame(termAtt, it.Current, "First AttributeImpl from iterator should be termAtt");
-			Assert.IsTrue(it.MoveNext(), "Iterator should have 1 attributes left");
-			Assert.AreSame(typeAtt, it.Current, "Second AttributeImpl from iterator should be typeAtt");
-			Assert.IsFalse(it.MoveNext(), "Iterator should have 0 attributes left");
-			
-			src = new AttributeSource();
-			src.AddAttributeImpl(new Token());
-			// this should not add a new attribute as Token implements TermAttribute, too
-			termAtt = (TermAttribute) src.AddAttribute(typeof(TermAttribute));
-			Assert.IsTrue(termAtt is Token, "TermAttribute should be implemented by Token");
-			// get the Token attribute and check, that it is the only one
+        public virtual void TestToStringAndMultiAttributeImplementations()
+        {
+            AttributeSource src = new AttributeSource();
+            TermAttribute termAtt = (TermAttribute)src.AddAttribute(typeof(TermAttribute));
+            TypeAttribute typeAtt = (TypeAttribute)src.AddAttribute(typeof(TypeAttribute));
+            termAtt.SetTermBuffer("TestTerm");
+            typeAtt.SetType("TestType");
+            Assert.AreEqual("(" + termAtt.ToString() + "," + typeAtt.ToString() + ")", src.ToString(), "Attributes should appear in original order");
+            System.Collections.Generic.IEnumerator<AttributeImpl> it = src.GetAttributeImplsIterator().GetEnumerator();
+            Assert.IsTrue(it.MoveNext(), "Iterator should have 2 attributes left");
+            Assert.AreSame(termAtt, it.Current, "First AttributeImpl from iterator should be termAtt");
+            Assert.IsTrue(it.MoveNext(), "Iterator should have 1 attributes left");
+            Assert.AreSame(typeAtt, it.Current, "Second AttributeImpl from iterator should be typeAtt");
+            Assert.IsFalse(it.MoveNext(), "Iterator should have 0 attributes left");
+
+            src = new AttributeSource();
+            src.AddAttributeImpl(new Token());
+            // this should not add a new attribute as Token implements TermAttribute, too
+            termAtt = (TermAttribute)src.AddAttribute(typeof(TermAttribute));
+            Assert.IsTrue(termAtt is Token, "TermAttribute should be implemented by Token");
+            // get the Token attribute and check that it is the only one
             it = src.GetAttributeImplsIterator().GetEnumerator();
             Assert.IsTrue(it.MoveNext());
             Token tok = (Token)it.Current;
-			Assert.IsFalse(it.MoveNext(), "There should be only one attribute implementation instance");
-			
-			termAtt.SetTermBuffer("TestTerm");
-			Assert.AreEqual("(" + tok.ToString() + ")", src.ToString(), "Token should only printed once");
-		}
-	}
+            Assert.IsFalse(it.MoveNext(), "There should be only one attribute implementation instance");
+
+            termAtt.SetTermBuffer("TestTerm");
+            Assert.AreEqual("(" + tok.ToString() + ")", src.ToString(), "Token should only printed once");
+        }
+
+        [Test]
+        public void TestInvalidArguments()
+        {
+            try
+            {
+                AttributeSource src = new AttributeSource();
+                src.AddAttribute(typeof(Token));
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+            catch (ArgumentException iae) { }
+
+            try
+            {
+                AttributeSource src = new AttributeSource();
+                src.AddAttribute(typeof(System.Collections.IEnumerator));
+                Assert.Fail("Should throw IllegalArgumentException");
+            }
+            catch (ArgumentException iae) { }
+        }
+    }
+    
+    
 }
\ No newline at end of file



Mime
View raw message