lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r832486 [4/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene....
Date Tue, 03 Nov 2009 18:06:38 GMT
Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestPerFieldAnalzyerWrapper.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,15 +19,15 @@
 
 using NUnit.Framework;
 
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 
 namespace Lucene.Net.Analysis
 {
-	
-	[TestFixture]	
-	public class TestPerFieldAnalyzerWrapper : LuceneTestCase
+
+    [TestFixture]
+    public class TestPerFieldAnalzyerWrapper : BaseTokenStreamTestCase
 	{
-		[Test]
+        [Test]
 		public virtual void  TestPerField()
 		{
 			System.String text = "Qwerty";
@@ -35,13 +35,15 @@
 			analyzer.AddAnalyzer("special", new SimpleAnalyzer());
 			
 			TokenStream tokenStream = analyzer.TokenStream("field", new System.IO.StringReader(text));
-            Token reusableToken = new Token();
-			Token nextToken = tokenStream.Next(reusableToken);
-			Assert.AreEqual("Qwerty", nextToken.Term(), "WhitespaceAnalyzer does not lowercase");
+			TermAttribute termAtt = (TermAttribute) tokenStream.GetAttribute(typeof(TermAttribute));
+			
+			Assert.IsTrue(tokenStream.IncrementToken());
+			Assert.AreEqual("Qwerty", termAtt.Term(), "WhitespaceAnalyzer does not lowercase");
 			
 			tokenStream = analyzer.TokenStream("special", new System.IO.StringReader(text));
-			nextToken = tokenStream.Next(reusableToken);
-			Assert.AreEqual("qwerty", nextToken.Term(), "SimpleAnalyzer lowercases");
+			termAtt = (TermAttribute) tokenStream.GetAttribute(typeof(TermAttribute));
+			Assert.IsTrue(tokenStream.IncrementToken());
+			Assert.AreEqual("qwerty", termAtt.Term(), "SimpleAnalyzer lowercases");
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStandardAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestStandardAnalyzer.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStandardAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStandardAnalyzer.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -20,50 +20,21 @@
 using NUnit.Framework;
 
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
+using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
+using TypeAttribute = Lucene.Net.Analysis.Tokenattributes.TypeAttribute;
 
 namespace Lucene.Net.Analysis
 {
 	
-	[TestFixture]
-	public class TestStandardAnalyzer : LuceneTestCase
+    [TestFixture]
+	public class TestStandardAnalyzer:BaseTokenStreamTestCase
 	{
 		
 		private Analyzer a = new StandardAnalyzer();
 		
-		public virtual void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] expected)
-		{
-			AssertAnalyzesTo(a, input, expected, null);
-		}
-		
-		public virtual void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] expectedImages, System.String[] expectedTypes)
-		{
-			AssertAnalyzesTo(a, input, expectedImages, expectedTypes, null);
-		}
-		
-		public virtual void  AssertAnalyzesTo(Analyzer a, System.String input, System.String[] expectedImages, System.String[] expectedTypes, int[] expectedPosIncrs)
-		{
-			TokenStream ts = a.TokenStream("dummy", new System.IO.StringReader(input));
-            Token reusableToken = new Token();
-			for (int i = 0; i < expectedImages.Length; i++)
-			{
-				Token nextToken = ts.Next(reusableToken);
-				Assert.IsNotNull(nextToken);
-				Assert.AreEqual(expectedImages[i], nextToken.Term());
-				if (expectedTypes != null)
-				{
-					Assert.AreEqual(expectedTypes[i], nextToken.Type());
-				}
-				if (expectedPosIncrs != null)
-				{
-					Assert.AreEqual(expectedPosIncrs[i], nextToken.GetPositionIncrement());
-				}
-			}
-			Assert.IsNull(ts.Next(reusableToken));
-			ts.Close();
-		}
-		
-		[Test]
+        [Test]
 		public virtual void  TestMaxTermLength()
 		{
 			StandardAnalyzer sa = new StandardAnalyzer();
@@ -71,17 +42,17 @@
 			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "xy", "z"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestMaxTermLength2()
 		{
 			StandardAnalyzer sa = new StandardAnalyzer();
 			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "toolong", "xy", "z"});
 			sa.SetMaxTokenLength(5);
 			
-			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "xy", "z"}, null, new int[]{1, 1, 2, 1});
+			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "xy", "z"}, new int[]{1, 1, 2, 1});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestMaxTermLength3()
 		{
 			char[] chars = new char[255];
@@ -93,7 +64,7 @@
 			AssertAnalyzesTo(a, "ab cd " + longTerm + "a xy z", new System.String[]{"ab", "cd", "xy", "z"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestAlphanumeric()
 		{
 			// alphanumeric tokens
@@ -101,7 +72,7 @@
 			AssertAnalyzesTo(a, "2B", new System.String[]{"2b"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestUnderscores()
 		{
 			// underscores are delimiters, but not in email addresses (below)
@@ -109,7 +80,7 @@
 			AssertAnalyzesTo(a, "word_with_underscore_and_stopwords", new System.String[]{"word", "underscore", "stopwords"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestDelimiters()
 		{
 			// other delimiters: "-", "/", ","
@@ -118,7 +89,7 @@
 			AssertAnalyzesTo(a, "ac/dc", new System.String[]{"ac", "dc"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestApostrophes()
 		{
 			// internal apostrophes: O'Reilly, you're, O'Reilly's
@@ -131,7 +102,7 @@
 			AssertAnalyzesTo(a, "O'Reilly's", new System.String[]{"o'reilly"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestTSADash()
 		{
 			// t and s had been stopwords in Lucene <= 2.0, which made it impossible
@@ -142,7 +113,7 @@
 			AssertAnalyzesTo(a, "a-class", new System.String[]{"class"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestCompanyNames()
 		{
 			// company names
@@ -150,7 +121,7 @@
 			AssertAnalyzesTo(a, "Excite@Home", new System.String[]{"excite@home"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestLucene1140()
 		{
 			try
@@ -158,28 +129,27 @@
 				StandardAnalyzer analyzer = new StandardAnalyzer(true);
 				AssertAnalyzesTo(analyzer, "www.nutch.org.", new System.String[]{"www.nutch.org"}, new System.String[]{"<HOST>"});
 			}
-			catch (System.NullReferenceException)
+			catch (System.NullReferenceException e)
 			{
 				Assert.IsTrue(false, "Should not throw an NPE and it did");
 			}
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestDomainNames()
 		{
-            // don't reuse because we alter its state (SetReplaceInvalidAcronym)
-            StandardAnalyzer a2 = new StandardAnalyzer();
+			// Don't reuse a because we alter its state (setReplaceInvalidAcronym)
+			StandardAnalyzer a2 = new StandardAnalyzer();
 			// domain names
 			AssertAnalyzesTo(a2, "www.nutch.org", new System.String[]{"www.nutch.org"});
 			//Notice the trailing .  See https://issues.apache.org/jira/browse/LUCENE-1068.
-			// the following should be recognized as HOST
+			// the following should be recognized as HOST:
 			AssertAnalyzesTo(a2, "www.nutch.org.", new System.String[]{"www.nutch.org"}, new System.String[]{"<HOST>"});
-			// the following should be recognized as HOST. The code that sets replaceDepAcronym should be removed in the next release.
 			a2.SetReplaceInvalidAcronym(false);
 			AssertAnalyzesTo(a2, "www.nutch.org.", new System.String[]{"wwwnutchorg"}, new System.String[]{"<ACRONYM>"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestEMailAddresses()
 		{
 			// email addresses, possibly with underscores, periods, etc
@@ -188,7 +158,7 @@
 			AssertAnalyzesTo(a, "first_lastname@example.com", new System.String[]{"first_lastname@example.com"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestNumeric()
 		{
 			// floating point, serial, model numbers, ip addresses, etc.
@@ -201,14 +171,14 @@
 			AssertAnalyzesTo(a, "a1-b-c3", new System.String[]{"a1-b-c3"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestTextWithNumbers()
 		{
 			// numbers
 			AssertAnalyzesTo(a, "David has 5000 bones", new System.String[]{"david", "has", "5000", "bones"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestVariousText()
 		{
 			// various
@@ -218,14 +188,14 @@
 			AssertAnalyzesTo(a, "\"QUOTED\" word", new System.String[]{"quoted", "word"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestAcronyms()
 		{
 			// acronyms have their dots stripped
 			AssertAnalyzesTo(a, "U.S.A.", new System.String[]{"usa"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestCPlusPlusHash()
 		{
 			// It would be nice to change the grammar in StandardTokenizer.jj to make "C#" and "C++" end up as tokens.
@@ -233,7 +203,7 @@
 			AssertAnalyzesTo(a, "C#", new System.String[]{"c"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestKorean()
 		{
 			// Korean words
@@ -243,43 +213,43 @@
 		// Compliance with the "old" JavaCC-based analyzer, see:
 		// https://issues.apache.org/jira/browse/LUCENE-966#action_12516752
 		
-		[Test]
+        [Test]
 		public virtual void  TestComplianceFileName()
 		{
 			AssertAnalyzesTo(a, "2004.jpg", new System.String[]{"2004.jpg"}, new System.String[]{"<HOST>"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestComplianceNumericIncorrect()
 		{
 			AssertAnalyzesTo(a, "62.46", new System.String[]{"62.46"}, new System.String[]{"<HOST>"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestComplianceNumericLong()
 		{
 			AssertAnalyzesTo(a, "978-0-94045043-1", new System.String[]{"978-0-94045043-1"}, new System.String[]{"<NUM>"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestComplianceNumericFile()
 		{
 			AssertAnalyzesTo(a, "78academyawards/rules/rule02.html", new System.String[]{"78academyawards/rules/rule02.html"}, new System.String[]{"<NUM>"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestComplianceNumericWithUnderscores()
 		{
 			AssertAnalyzesTo(a, "2006-03-11t082958z_01_ban130523_rtridst_0_ozabs", new System.String[]{"2006-03-11t082958z_01_ban130523_rtridst_0_ozabs"}, new System.String[]{"<NUM>"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestComplianceNumericWithDash()
 		{
 			AssertAnalyzesTo(a, "mid-20th", new System.String[]{"mid-20th"}, new System.String[]{"<NUM>"});
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestComplianceManyTokens()
 		{
 			AssertAnalyzesTo(a, "/money.cnn.com/magazines/fortune/fortune_archive/2007/03/19/8402357/index.htm " + "safari-0-sheikh-zayed-grand-mosque.jpg", new System.String[]{"money.cnn.com", "magazines", "fortune", "fortune", "archive/2007/03/19/8402357", "index.htm", "safari-0-sheikh", "zayed", "grand", "mosque.jpg"}, new System.String[]{"<HOST>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<NUM>", "<HOST>", "<NUM>", "<ALPHANUM>", "<ALPHANUM>", "<HOST>"});
@@ -287,7 +257,7 @@
 		
 		/// <deprecated> this should be removed in the 3.0. 
 		/// </deprecated>
-		[Test]
+        [Test]
 		public virtual void  TestDeprecatedAcronyms()
 		{
 			// test backward compatibility for applications that require the old behavior.

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestStopAnalyzer.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopAnalyzer.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,46 +19,51 @@
 
 using NUnit.Framework;
 
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 
 namespace Lucene.Net.Analysis
 {
 	
-	[TestFixture]
-	public class TestStopAnalyzer : LuceneTestCase
+    [TestFixture]
+	public class TestStopAnalyzer:BaseTokenStreamTestCase
 	{
 		
-		private StopAnalyzer stop = new StopAnalyzer();
+		private StopAnalyzer stop = new StopAnalyzer(false);
 		private System.Collections.Hashtable inValidTokens = new System.Collections.Hashtable();
 		
+		public TestStopAnalyzer(System.String s):base(s)
+		{
+		}
+		
 		[SetUp]
 		public override void  SetUp()
 		{
 			base.SetUp();
-			stop = new StopAnalyzer();
-			inValidTokens = new System.Collections.Hashtable();
-
-			for (int i = 0; i < StopAnalyzer.ENGLISH_STOP_WORDS.Length; i++)
+			
+			System.Collections.IEnumerator it = StopAnalyzer.ENGLISH_STOP_WORDS_SET.GetEnumerator();
+			while (it.MoveNext())
 			{
-				inValidTokens.Add(StopAnalyzer.ENGLISH_STOP_WORDS[i], StopAnalyzer.ENGLISH_STOP_WORDS[i]);
+				inValidTokens.Add(it.Current, it.Current);
 			}
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestDefaults()
 		{
 			Assert.IsTrue(stop != null);
 			System.IO.StringReader reader = new System.IO.StringReader("This is a test of the english stop analyzer");
 			TokenStream stream = stop.TokenStream("test", reader);
 			Assert.IsTrue(stream != null);
-			Token reusableToken = new Token();
-			for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+			
+			while (stream.IncrementToken())
 			{
-				Assert.IsFalse(inValidTokens.Contains(nextToken.Term()));
+				Assert.IsFalse(inValidTokens.Contains(termAtt.Term()));
 			}
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestStopList()
 		{
 			System.Collections.Hashtable stopWordsSet = new System.Collections.Hashtable();
@@ -69,16 +74,18 @@
 			System.IO.StringReader reader = new System.IO.StringReader("This is a good test of the english stop analyzer");
 			TokenStream stream = newStop.TokenStream("test", reader);
 			Assert.IsNotNull(stream);
-			Token reusableToken = new Token();
-			for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+			PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stream.AddAttribute(typeof(PositionIncrementAttribute));
+			
+			while (stream.IncrementToken())
 			{
-                System.String text = nextToken.Term();
+				System.String text = termAtt.Term();
 				Assert.IsFalse(stopWordsSet.Contains(text));
-                Assert.AreEqual(1, nextToken.GetPositionIncrement()); // by default stop tokenizer does not apply increments.
+				Assert.AreEqual(1, posIncrAtt.GetPositionIncrement()); // by default stop tokenizer does not apply increments.
 			}
 		}
-
-		[Test]
+		
+        [Test]
 		public virtual void  TestStopListPositions()
 		{
 			bool defaultEnable = StopFilter.GetEnablePositionIncrementsDefault();
@@ -95,12 +102,14 @@
 				TokenStream stream = newStop.TokenStream("test", reader);
 				Assert.IsNotNull(stream);
 				int i = 0;
-                Token reusableToken = new Token();
-                for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+				TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+				PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stream.AddAttribute(typeof(PositionIncrementAttribute));
+				
+				while (stream.IncrementToken())
 				{
-                    System.String text = nextToken.Term();
+					System.String text = termAtt.Term();
 					Assert.IsFalse(stopWordsSet.Contains(text));
-                    Assert.AreEqual(expectedIncr[i++], nextToken.GetPositionIncrement());
+					Assert.AreEqual(expectedIncr[i++], posIncrAtt.GetPositionIncrement());
 				}
 			}
 			finally

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestStopFilter.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestStopFilter.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,64 +19,69 @@
 
 using NUnit.Framework;
 
+using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using English = Lucene.Net.Util.English;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Analysis
 {
 	
-	/// <author>  yonik
-	/// </author>
-	[TestFixture]
-	public class TestStopFilter : LuceneTestCase
+	
+    [TestFixture]
+	public class TestStopFilter:BaseTokenStreamTestCase
 	{
 		
 		private const bool VERBOSE = false;
 		
 		// other StopFilter functionality is already tested by TestStopAnalyzer
 		
-		[Test]
+        [Test]
 		public virtual void  TestExactCase()
 		{
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
 			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
-			TokenStream stream = new StopFilter(new WhitespaceTokenizer(reader), stopWords);
-            Token reusableToken = new Token();
-			Assert.AreEqual("Now", stream.Next(reusableToken).Term());
-			Assert.AreEqual("The", stream.Next(reusableToken).Term());
-			Assert.AreEqual(null, stream.Next(reusableToken));
+			TokenStream stream = new StopFilter(false, new WhitespaceTokenizer(reader), stopWords);
+			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+			Assert.IsTrue(stream.IncrementToken());
+			Assert.AreEqual("Now", termAtt.Term());
+			Assert.IsTrue(stream.IncrementToken());
+			Assert.AreEqual("The", termAtt.Term());
+			Assert.IsFalse(stream.IncrementToken());
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestIgnoreCase()
 		{
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
 			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
-			TokenStream stream = new StopFilter(new WhitespaceTokenizer(reader), stopWords, true);
-            Token reusableToken = new Token();
-            Assert.AreEqual("Now", stream.Next(reusableToken).Term());
-			Assert.AreEqual(null, stream.Next(reusableToken));
+			TokenStream stream = new StopFilter(false, new WhitespaceTokenizer(reader), stopWords, true);
+			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+			Assert.IsTrue(stream.IncrementToken());
+			Assert.AreEqual("Now", termAtt.Term());
+			Assert.IsFalse(stream.IncrementToken());
 		}
 		
-		[Test]
+        [Test]
 		public virtual void  TestStopFilt()
 		{
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
 			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
 			System.Collections.Hashtable stopSet = StopFilter.MakeStopSet(stopWords);
-			TokenStream stream = new StopFilter(new WhitespaceTokenizer(reader), stopSet);
-            Token reusableToken = new Token();
-            Assert.AreEqual("Now", stream.Next(reusableToken).Term());
-			Assert.AreEqual("The", stream.Next(reusableToken).Term());
-			Assert.AreEqual(null, stream.Next(reusableToken));
+			TokenStream stream = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet);
+			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+			Assert.IsTrue(stream.IncrementToken());
+			Assert.AreEqual("Now", termAtt.Term());
+			Assert.IsTrue(stream.IncrementToken());
+			Assert.AreEqual("The", termAtt.Term());
+			Assert.IsFalse(stream.IncrementToken());
 		}
 		
 		/// <summary> Test Position increments applied by StopFilter with and without enabling this option.</summary>
-		[Test]
+        [Test]
 		public virtual void  TestStopPositons()
 		{
 			System.Text.StringBuilder sb = new System.Text.StringBuilder();
-			System.Collections.Generic.List<string> a = new System.Collections.Generic.List<string>();
+			System.Collections.ArrayList a = new System.Collections.ArrayList();
 			for (int i = 0; i < 20; i++)
 			{
 				System.String w = English.IntToEnglish(i).Trim();
@@ -91,15 +96,15 @@
 			System.Collections.Hashtable stopSet = StopFilter.MakeStopSet(stopWords);
 			// with increments
 			System.IO.StringReader reader = new System.IO.StringReader(sb.ToString());
-			StopFilter stpf = new StopFilter(new WhitespaceTokenizer(reader), stopSet);
+			StopFilter stpf = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet);
 			DoTestStopPositons(stpf, true);
 			// without increments
 			reader = new System.IO.StringReader(sb.ToString());
-			stpf = new StopFilter(new WhitespaceTokenizer(reader), stopSet);
+			stpf = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet);
 			DoTestStopPositons(stpf, false);
 			// with increments, concatenating two stop filters
-			System.Collections.Generic.List<string> a0 = new System.Collections.Generic.List<string>();
-			System.Collections.Generic.List<string> a1 = new System.Collections.Generic.List<string>();
+			System.Collections.ArrayList a0 = new System.Collections.ArrayList();
+			System.Collections.ArrayList a1 = new System.Collections.ArrayList();
 			for (int i = 0; i < a.Count; i++)
 			{
 				if (i % 2 == 0)
@@ -120,9 +125,9 @@
 			System.Collections.Hashtable stopSet0 = StopFilter.MakeStopSet(stopWords0);
 			System.Collections.Hashtable stopSet1 = StopFilter.MakeStopSet(stopWords1);
 			reader = new System.IO.StringReader(sb.ToString());
-			StopFilter stpf0 = new StopFilter(new WhitespaceTokenizer(reader), stopSet0); // first part of the set
+			StopFilter stpf0 = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet0); // first part of the set
 			stpf0.SetEnablePositionIncrements(true);
-			StopFilter stpf01 = new StopFilter(stpf0, stopSet1); // two stop filters concatenated!
+			StopFilter stpf01 = new StopFilter(false, stpf0, stopSet1); // two stop filters concatenated!
 			DoTestStopPositons(stpf01, true);
 		}
 		
@@ -130,16 +135,17 @@
 		{
 			Log("---> test with enable-increments-" + (enableIcrements?"enabled":"disabled"));
 			stpf.SetEnablePositionIncrements(enableIcrements);
-            Token reusableToken = new Token();
-            for (int i = 0; i < 20; i += 3)
+			TermAttribute termAtt = (TermAttribute) stpf.GetAttribute(typeof(TermAttribute));
+			PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stpf.GetAttribute(typeof(PositionIncrementAttribute));
+			for (int i = 0; i < 20; i += 3)
 			{
-				Token nextToken = stpf.Next(reusableToken);
-				Log("Token " + i + ": " + nextToken);
+				Assert.IsTrue(stpf.IncrementToken());
+				Log("Token " + i + ": " + stpf);
 				System.String w = English.IntToEnglish(i).Trim();
-				Assert.AreEqual(w, nextToken.Term(), "expecting token " + i + " to be " + w);
-				Assert.AreEqual(enableIcrements ? (i == 0 ? 1 : 3) : 1, nextToken.GetPositionIncrement(), "all but first token must have position increment of 3");
+				Assert.AreEqual(w, termAtt.Term(), "expecting token " + i + " to be " + w);
+				Assert.AreEqual(enableIcrements?(i == 0?1:3):1, posIncrAtt.GetPositionIncrement(), "all but first token must have position increment of 3");
 			}
-			Assert.IsNull(stpf.Next(reusableToken));
+			Assert.IsFalse(stpf.IncrementToken());
 		}
 		
 		// print debug info depending on VERBOSE

Added: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTeeSinkTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestTeeSinkTokenFilter.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTeeSinkTokenFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTeeSinkTokenFilter.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,333 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using StandardFilter = Lucene.Net.Analysis.Standard.StandardFilter;
+using StandardTokenizer = Lucene.Net.Analysis.Standard.StandardTokenizer;
+using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
+using AttributeSource = Lucene.Net.Util.AttributeSource;
+using English = Lucene.Net.Util.English;
+
+namespace Lucene.Net.Analysis
+{
+	
+	/// <summary> tests for the TestTeeSinkTokenFilter</summary>
+    [TestFixture]
+	public class TestTeeSinkTokenFilter:BaseTokenStreamTestCase
+	{
+		public class AnonymousClassSinkFilter:TeeSinkTokenFilter.SinkFilter
+		{
+			public override bool Accept(AttributeSource a)
+			{
+				TermAttribute termAtt = (TermAttribute) a.GetAttribute(typeof(TermAttribute));
+				return termAtt.Term().ToUpper().Equals("The".ToUpper());
+			}
+		}
+		public class AnonymousClassSinkFilter1:TeeSinkTokenFilter.SinkFilter
+		{
+			public override bool Accept(AttributeSource a)
+			{
+				TermAttribute termAtt = (TermAttribute) a.GetAttribute(typeof(TermAttribute));
+				return termAtt.Term().ToUpper().Equals("Dogs".ToUpper());
+			}
+		}
+		protected internal System.Text.StringBuilder buffer1;
+		protected internal System.Text.StringBuilder buffer2;
+		protected internal System.String[] tokens1;
+		protected internal System.String[] tokens2;
+		
+		
+		public TestTeeSinkTokenFilter(System.String s):base(s)
+		{
+		}
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			tokens1 = new System.String[]{"The", "quick", "Burgundy", "Fox", "jumped", "over", "the", "lazy", "Red", "Dogs"};
+			tokens2 = new System.String[]{"The", "Lazy", "Dogs", "should", "stay", "on", "the", "porch"};
+			buffer1 = new System.Text.StringBuilder();
+			
+			for (int i = 0; i < tokens1.Length; i++)
+			{
+				buffer1.Append(tokens1[i]).Append(' ');
+			}
+			buffer2 = new System.Text.StringBuilder();
+			for (int i = 0; i < tokens2.Length; i++)
+			{
+				buffer2.Append(tokens2[i]).Append(' ');
+			}
+		}
+		
+		internal static readonly TeeSinkTokenFilter.SinkFilter theFilter;
+		
+		internal static readonly TeeSinkTokenFilter.SinkFilter dogFilter;
+		
+		
+		[Test]
+		public virtual void  TestGeneral()
+		{
+			TeeSinkTokenFilter source = new TeeSinkTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer1.ToString())));
+			TokenStream sink1 = source.NewSinkTokenStream();
+			TokenStream sink2 = source.NewSinkTokenStream(theFilter);
+			int i = 0;
+			TermAttribute termAtt = (TermAttribute) source.GetAttribute(typeof(TermAttribute));
+			while (source.IncrementToken())
+			{
+				Assert.AreEqual(tokens1[i], termAtt.Term());
+				i++;
+			}
+			Assert.AreEqual(tokens1.Length, i);
+			
+			i = 0;
+			termAtt = (TermAttribute) sink1.GetAttribute(typeof(TermAttribute));
+			while (sink1.IncrementToken())
+			{
+				Assert.AreEqual(tokens1[i], termAtt.Term());
+				i++;
+			}
+			Assert.AreEqual(tokens1.Length, i);
+			
+			i = 0;
+			termAtt = (TermAttribute) sink2.GetAttribute(typeof(TermAttribute));
+			while (sink2.IncrementToken())
+			{
+				Assert.IsTrue(termAtt.Term().ToUpper().Equals("The".ToUpper()));
+				i++;
+			}
+			Assert.AreEqual(2, i, "there should be two times 'the' in the stream");
+		}
+		
+		[Test]
+		public virtual void  TestMultipleSources()
+		{
+			TeeSinkTokenFilter tee1 = new TeeSinkTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer1.ToString())));
+			TeeSinkTokenFilter.SinkTokenStream dogDetector = tee1.NewSinkTokenStream(dogFilter);
+			TeeSinkTokenFilter.SinkTokenStream theDetector = tee1.NewSinkTokenStream(theFilter);
+			TokenStream source1 = new CachingTokenFilter(tee1);
+			
+			TeeSinkTokenFilter tee2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer2.ToString())));
+			tee2.AddSinkTokenStream(dogDetector);
+			tee2.AddSinkTokenStream(theDetector);
+			TokenStream source2 = tee2;
+			
+			int i = 0;
+			TermAttribute termAtt = (TermAttribute) source1.GetAttribute(typeof(TermAttribute));
+			while (source1.IncrementToken())
+			{
+				Assert.AreEqual(tokens1[i], termAtt.Term());
+				i++;
+			}
+			Assert.AreEqual(tokens1.Length, i);
+			i = 0;
+			termAtt = (TermAttribute) source2.GetAttribute(typeof(TermAttribute));
+			while (source2.IncrementToken())
+			{
+				Assert.AreEqual(tokens2[i], termAtt.Term());
+				i++;
+			}
+			Assert.AreEqual(tokens2.Length, i);
+			i = 0;
+			termAtt = (TermAttribute) theDetector.GetAttribute(typeof(TermAttribute));
+			while (theDetector.IncrementToken())
+			{
+				Assert.IsTrue(termAtt.Term().ToUpper().Equals("The".ToUpper()), "'" + termAtt.Term() + "' is not equal to 'The'");
+				i++;
+			}
+			Assert.AreEqual(4, i, "there must be 4 times 'The' in the stream");
+			i = 0;
+			termAtt = (TermAttribute) dogDetector.GetAttribute(typeof(TermAttribute));
+			while (dogDetector.IncrementToken())
+			{
+				Assert.IsTrue(termAtt.Term().ToUpper().Equals("Dogs".ToUpper()), "'" + termAtt.Term() + "' is not equal to 'Dogs'");
+				i++;
+			}
+			Assert.AreEqual(2, i, "there must be 2 times 'Dog' in the stream");
+			
+			source1.Reset();
+			TokenStream lowerCasing = new LowerCaseFilter(source1);
+			i = 0;
+			termAtt = (TermAttribute) lowerCasing.GetAttribute(typeof(TermAttribute));
+			while (lowerCasing.IncrementToken())
+			{
+				Assert.AreEqual(tokens1[i].ToLower(), termAtt.Term());
+				i++;
+			}
+			Assert.AreEqual(i, tokens1.Length);
+		}
+		
+		/// <summary> Not an explicit test, just useful to print out some info on performance
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		public virtual void  Performance()
+		{
+			int[] tokCount = new int[]{100, 500, 1000, 2000, 5000, 10000};
+			int[] modCounts = new int[]{1, 2, 5, 10, 20, 50, 100, 200, 500};
+			for (int k = 0; k < tokCount.Length; k++)
+			{
+				System.Text.StringBuilder buffer = new System.Text.StringBuilder();
+				System.Console.Out.WriteLine("-----Tokens: " + tokCount[k] + "-----");
+				for (int i = 0; i < tokCount[k]; i++)
+				{
+					buffer.Append(English.IntToEnglish(i).ToUpper()).Append(' ');
+				}
+				//make sure we produce the same tokens
+				TeeSinkTokenFilter teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))));
+				TokenStream sink = teeStream.NewSinkTokenStream(new ModuloSinkFilter(this, 100));
+				teeStream.ConsumeAllTokens();
+				TokenStream stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
+				TermAttribute tfTok = (TermAttribute) stream.AddAttribute(typeof(TermAttribute));
+				TermAttribute sinkTok = (TermAttribute) sink.AddAttribute(typeof(TermAttribute));
+				for (int i = 0; stream.IncrementToken(); i++)
+				{
+					Assert.IsTrue(sink.IncrementToken());
+					Assert.IsTrue(tfTok.Equals(sinkTok) == true, tfTok + " is not equal to " + sinkTok + " at token: " + i);
+				}
+				
+				//simulate two fields, each being analyzed once, for 20 documents
+				for (int j = 0; j < modCounts.Length; j++)
+				{
+					int tfPos = 0;
+					long start = System.DateTime.Now.Millisecond;
+					for (int i = 0; i < 20; i++)
+					{
+						stream = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
+						PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stream.GetAttribute(typeof(PositionIncrementAttribute));
+						while (stream.IncrementToken())
+						{
+							tfPos += posIncrAtt.GetPositionIncrement();
+						}
+						stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
+						posIncrAtt = (PositionIncrementAttribute) stream.GetAttribute(typeof(PositionIncrementAttribute));
+						while (stream.IncrementToken())
+						{
+							tfPos += posIncrAtt.GetPositionIncrement();
+						}
+					}
+					long finish = System.DateTime.Now.Millisecond;
+					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Two fields took " + (finish - start) + " ms");
+					int sinkPos = 0;
+					//simulate one field with one sink
+					start = System.DateTime.Now.Millisecond;
+					for (int i = 0; i < 20; i++)
+					{
+						teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))));
+						sink = teeStream.NewSinkTokenStream(new ModuloSinkFilter(this, modCounts[j]));
+						PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) teeStream.GetAttribute(typeof(PositionIncrementAttribute));
+						while (teeStream.IncrementToken())
+						{
+							sinkPos += posIncrAtt.GetPositionIncrement();
+						}
+						//System.out.println("Modulo--------");
+						posIncrAtt = (PositionIncrementAttribute) sink.GetAttribute(typeof(PositionIncrementAttribute));
+						while (sink.IncrementToken())
+						{
+							sinkPos += posIncrAtt.GetPositionIncrement();
+						}
+					}
+					finish = System.DateTime.Now.Millisecond;
+					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Tee fields took " + (finish - start) + " ms");
+                    Assert.IsTrue(sinkPos == tfPos, sinkPos + " does not equal: " + tfPos);
+				}
+				System.Console.Out.WriteLine("- End Tokens: " + tokCount[k] + "-----");
+			}
+		}
+		
+		
+		internal class ModuloTokenFilter:TokenFilter
+		{
+			private void  InitBlock(TestTeeSinkTokenFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTeeSinkTokenFilter enclosingInstance;
+			public TestTeeSinkTokenFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			internal int modCount;
+			
+			internal ModuloTokenFilter(TestTeeSinkTokenFilter enclosingInstance, TokenStream input, int mc):base(input)
+			{
+				InitBlock(enclosingInstance);
+				modCount = mc;
+			}
+			
+			internal int count = 0;
+			
+			//return every 100 tokens
+			public override bool IncrementToken()
+			{
+				bool hasNext;
+				for (hasNext = input.IncrementToken(); hasNext && count % modCount != 0; hasNext = input.IncrementToken())
+				{
+					count++;
+				}
+				count++;
+				return hasNext;
+			}
+		}
+		
+		internal class ModuloSinkFilter:TeeSinkTokenFilter.SinkFilter
+		{
+			private void  InitBlock(TestTeeSinkTokenFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTeeSinkTokenFilter enclosingInstance;
+			public TestTeeSinkTokenFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal int count = 0;
+			internal int modCount;
+			
+			internal ModuloSinkFilter(TestTeeSinkTokenFilter enclosingInstance, int mc)
+			{
+				InitBlock(enclosingInstance);
+				modCount = mc;
+			}
+			
+			public override bool Accept(AttributeSource a)
+			{
+				bool b = (a != null && count % modCount == 0);
+				count++;
+				return b;
+			}
+		}
+		static TestTeeSinkTokenFilter()
+		{
+			theFilter = new AnonymousClassSinkFilter();
+			dogFilter = new AnonymousClassSinkFilter1();
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTeeTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestTeeTokenFilter.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTeeTokenFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTeeTokenFilter.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,379 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using StandardFilter = Lucene.Net.Analysis.Standard.StandardFilter;
+using StandardTokenizer = Lucene.Net.Analysis.Standard.StandardTokenizer;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Analysis
+{
+	
+	/// <summary> tests for the TeeTokenFilter and SinkTokenizer</summary>
+    [TestFixture]
+	public class TestTeeTokenFilter:LuceneTestCase
+	{
+		private class AnonymousClassSinkTokenizer:SinkTokenizer
+		{
+			private void  InitBlock(TestTeeTokenFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTeeTokenFilter enclosingInstance;
+			public TestTeeTokenFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal AnonymousClassSinkTokenizer(TestTeeTokenFilter enclosingInstance, System.Collections.IList Param1):base(Param1)
+			{
+				InitBlock(enclosingInstance);
+			}
+			public override void  Add(Token t)
+			{
+				if (t != null && t.Term().ToUpper().Equals("The".ToUpper()))
+				{
+					base.Add(t);
+				}
+			}
+		}
+		private class AnonymousClassSinkTokenizer1:SinkTokenizer
+		{
+			private void  InitBlock(TestTeeTokenFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTeeTokenFilter enclosingInstance;
+			public TestTeeTokenFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal AnonymousClassSinkTokenizer1(TestTeeTokenFilter enclosingInstance, System.Collections.IList Param1):base(Param1)
+			{
+				InitBlock(enclosingInstance);
+			}
+			public override void  Add(Token t)
+			{
+				if (t != null && t.Term().ToUpper().Equals("The".ToUpper()))
+				{
+					base.Add(t);
+				}
+			}
+		}
+		private class AnonymousClassSinkTokenizer2:SinkTokenizer
+		{
+			private void  InitBlock(TestTeeTokenFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTeeTokenFilter enclosingInstance;
+			public TestTeeTokenFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal AnonymousClassSinkTokenizer2(TestTeeTokenFilter enclosingInstance, System.Collections.IList Param1):base(Param1)
+			{
+				InitBlock(enclosingInstance);
+			}
+			public override void  Add(Token t)
+			{
+				if (t != null && t.Term().ToUpper().Equals("Dogs".ToUpper()))
+				{
+					base.Add(t);
+				}
+			}
+		}
+		protected internal System.Text.StringBuilder buffer1;
+		protected internal System.Text.StringBuilder buffer2;
+		protected internal System.String[] tokens1;
+		protected internal System.String[] tokens2;
+		
+		
+		public TestTeeTokenFilter(System.String s):base(s)
+		{
+		}
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			tokens1 = new System.String[]{"The", "quick", "Burgundy", "Fox", "jumped", "over", "the", "lazy", "Red", "Dogs"};
+			tokens2 = new System.String[]{"The", "Lazy", "Dogs", "should", "stay", "on", "the", "porch"};
+			buffer1 = new System.Text.StringBuilder();
+			
+			for (int i = 0; i < tokens1.Length; i++)
+			{
+				buffer1.Append(tokens1[i]).Append(' ');
+			}
+			buffer2 = new System.Text.StringBuilder();
+			for (int i = 0; i < tokens2.Length; i++)
+			{
+				buffer2.Append(tokens2[i]).Append(' ');
+			}
+		}
+		
+		[Test]
+		public virtual void  Test()
+		{
+			
+			SinkTokenizer sink1 = new AnonymousClassSinkTokenizer(this, null);
+			TokenStream source = new TeeTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer1.ToString())), sink1);
+			int i = 0;
+			Token reusableToken = new Token();
+			for (Token nextToken = source.Next(reusableToken); nextToken != null; nextToken = source.Next(reusableToken))
+			{
+				Assert.IsTrue(nextToken.Term().Equals(tokens1[i]) == true, nextToken.Term() + " is not equal to " + tokens1[i]);
+				i++;
+			}
+			Assert.IsTrue(i == tokens1.Length, i + " does not equal: " + tokens1.Length);
+			Assert.IsTrue(sink1.GetTokens().Count == 2, "sink1 Size: " + sink1.GetTokens().Count + " is not: " + 2);
+			i = 0;
+			for (Token token = sink1.Next(reusableToken); token != null; token = sink1.Next(reusableToken))
+			{
+				Assert.IsTrue(token.Term().ToUpper().Equals("The".ToUpper()) == true, token.Term() + " is not equal to " + "The");
+				i++;
+			}
+			Assert.IsTrue(i == sink1.GetTokens().Count, i + " does not equal: " + sink1.GetTokens().Count);
+		}
+		
+		[Test]
+		public virtual void  TestMultipleSources()
+		{
+			SinkTokenizer theDetector = new AnonymousClassSinkTokenizer1(this, null);
+			SinkTokenizer dogDetector = new AnonymousClassSinkTokenizer2(this, null);
+			TokenStream source1 = new CachingTokenFilter(new TeeTokenFilter(new TeeTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer1.ToString())), theDetector), dogDetector));
+			TokenStream source2 = new TeeTokenFilter(new TeeTokenFilter(new WhitespaceTokenizer(new System.IO.StringReader(buffer2.ToString())), theDetector), dogDetector);
+			int i = 0;
+			Token reusableToken = new Token();
+			for (Token nextToken = source1.Next(reusableToken); nextToken != null; nextToken = source1.Next(reusableToken))
+			{
+				Assert.IsTrue(nextToken.Term().Equals(tokens1[i]) == true, nextToken.Term() + " is not equal to " + tokens1[i]);
+				i++;
+			}
+			Assert.IsTrue(i == tokens1.Length, i + " does not equal: " + tokens1.Length);
+			Assert.IsTrue(theDetector.GetTokens().Count == 2, "theDetector Size: " + theDetector.GetTokens().Count + " is not: " + 2);
+			Assert.IsTrue(dogDetector.GetTokens().Count == 1, "dogDetector Size: " + dogDetector.GetTokens().Count + " is not: " + 1);
+			i = 0;
+			for (Token nextToken = source2.Next(reusableToken); nextToken != null; nextToken = source2.Next(reusableToken))
+			{
+				Assert.IsTrue(nextToken.Term().Equals(tokens2[i]) == true, nextToken.Term() + " is not equal to " + tokens2[i]);
+				i++;
+			}
+			Assert.IsTrue(i == tokens2.Length, i + " does not equal: " + tokens2.Length);
+			Assert.IsTrue(theDetector.GetTokens().Count == 4, "theDetector Size: " + theDetector.GetTokens().Count + " is not: " + 4);
+			Assert.IsTrue(dogDetector.GetTokens().Count == 2, "dogDetector Size: " + dogDetector.GetTokens().Count + " is not: " + 2);
+			i = 0;
+			for (Token nextToken = theDetector.Next(reusableToken); nextToken != null; nextToken = theDetector.Next(reusableToken))
+			{
+				Assert.IsTrue(nextToken.Term().ToUpper().Equals("The".ToUpper()) == true, nextToken.Term() + " is not equal to " + "The");
+				i++;
+			}
+			Assert.IsTrue(i == theDetector.GetTokens().Count, i + " does not equal: " + theDetector.GetTokens().Count);
+			i = 0;
+			for (Token nextToken = dogDetector.Next(reusableToken); nextToken != null; nextToken = dogDetector.Next(reusableToken))
+			{
+				Assert.IsTrue(nextToken.Term().ToUpper().Equals("Dogs".ToUpper()) == true, nextToken.Term() + " is not equal to " + "Dogs");
+				i++;
+			}
+			Assert.IsTrue(i == dogDetector.GetTokens().Count, i + " does not equal: " + dogDetector.GetTokens().Count);
+			source1.Reset();
+			TokenStream lowerCasing = new LowerCaseFilter(source1);
+			i = 0;
+			for (Token nextToken = lowerCasing.Next(reusableToken); nextToken != null; nextToken = lowerCasing.Next(reusableToken))
+			{
+				Assert.IsTrue(nextToken.Term().Equals(tokens1[i].ToLower()) == true, nextToken.Term() + " is not equal to " + tokens1[i].ToLower());
+				i++;
+			}
+			Assert.IsTrue(i == tokens1.Length, i + " does not equal: " + tokens1.Length);
+		}
+		
+		/// <summary> Not an explicit test, just useful to print out some info on performance
+		/// 
+		/// </summary>
+		/// <throws>  Exception </throws>
+		public virtual void  Performance()
+		{
+			int[] tokCount = new int[]{100, 500, 1000, 2000, 5000, 10000};
+			int[] modCounts = new int[]{1, 2, 5, 10, 20, 50, 100, 200, 500};
+			for (int k = 0; k < tokCount.Length; k++)
+			{
+				System.Text.StringBuilder buffer = new System.Text.StringBuilder();
+				System.Console.Out.WriteLine("-----Tokens: " + tokCount[k] + "-----");
+				for (int i = 0; i < tokCount[k]; i++)
+				{
+					buffer.Append(English.IntToEnglish(i).ToUpper()).Append(' ');
+				}
+				//make sure we produce the same tokens
+				ModuloSinkTokenizer sink = new ModuloSinkTokenizer(this, tokCount[k], 100);
+				Token reusableToken = new Token();
+				TokenStream stream = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
+				while (stream.Next(reusableToken) != null)
+				{
+				}
+				stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
+				System.Collections.IList tmp = new System.Collections.ArrayList();
+				for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+				{
+					tmp.Add(nextToken.Clone());
+				}
+				System.Collections.IList sinkList = sink.GetTokens();
+				Assert.IsTrue(tmp.Count == sinkList.Count, "tmp Size: " + tmp.Count + " is not: " + sinkList.Count);
+				for (int i = 0; i < tmp.Count; i++)
+				{
+					Token tfTok = (Token) tmp[i];
+					Token sinkTok = (Token) sinkList[i];
+					Assert.IsTrue(tfTok.Term().Equals(sinkTok.Term()) == true, tfTok.Term() + " is not equal to " + sinkTok.Term() + " at token: " + i);
+				}
+				//simulate two fields, each being analyzed once, for 20 documents
+				
+				for (int j = 0; j < modCounts.Length; j++)
+				{
+					int tfPos = 0;
+					long start = System.DateTime.Now.Millisecond;
+					for (int i = 0; i < 20; i++)
+					{
+						stream = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
+						for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+						{
+							tfPos += nextToken.GetPositionIncrement();
+						}
+						stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
+						for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+						{
+							tfPos += nextToken.GetPositionIncrement();
+						}
+					}
+					long finish = System.DateTime.Now.Millisecond;
+					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Two fields took " + (finish - start) + " ms");
+					int sinkPos = 0;
+					//simulate one field with one sink
+					start = System.DateTime.Now.Millisecond;
+					for (int i = 0; i < 20; i++)
+					{
+						sink = new ModuloSinkTokenizer(this, tokCount[k], modCounts[j]);
+						stream = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
+						for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+						{
+							sinkPos += nextToken.GetPositionIncrement();
+						}
+						//System.out.println("Modulo--------");
+						stream = sink;
+						for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
+						{
+							sinkPos += nextToken.GetPositionIncrement();
+						}
+					}
+					finish = System.DateTime.Now.Millisecond;
+					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Tee fields took " + (finish - start) + " ms");
+					Assert.IsTrue(sinkPos == tfPos, sinkPos + " does not equal: " + tfPos);
+				}
+				System.Console.Out.WriteLine("- End Tokens: " + tokCount[k] + "-----");
+			}
+		}
+		
+		
+		internal class ModuloTokenFilter:TokenFilter
+		{
+			private void  InitBlock(TestTeeTokenFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTeeTokenFilter enclosingInstance;
+			public TestTeeTokenFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			internal int modCount;
+			
+			internal ModuloTokenFilter(TestTeeTokenFilter enclosingInstance, TokenStream input, int mc):base(input)
+			{
+				InitBlock(enclosingInstance);
+				modCount = mc;
+			}
+			
+			internal int count = 0;
+			
+			//return every 100 tokens
+			public override Token Next(Token reusableToken)
+			{
+				Token nextToken = null;
+				for (nextToken = input.Next(reusableToken); nextToken != null && count % modCount != 0; nextToken = input.Next(reusableToken))
+				{
+					count++;
+				}
+				count++;
+				return nextToken;
+			}
+		}
+		
+		internal class ModuloSinkTokenizer:SinkTokenizer
+		{
+			private void  InitBlock(TestTeeTokenFilter enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTeeTokenFilter enclosingInstance;
+			public TestTeeTokenFilter Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal int count = 0;
+			internal int modCount;
+			
+			
+			internal ModuloSinkTokenizer(TestTeeTokenFilter enclosingInstance, int numToks, int mc)
+			{
+				InitBlock(enclosingInstance);
+				modCount = mc;
+				lst = new System.Collections.ArrayList(numToks % mc);
+			}
+			
+			public override void  Add(Token t)
+			{
+				if (t != null && count % modCount == 0)
+				{
+					base.Add(t);
+				}
+				count++;
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestToken.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestToken.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestToken.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestToken.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,173 +19,228 @@
 
 using NUnit.Framework;
 
+using Payload = Lucene.Net.Index.Payload;
+using TestSimpleAttributeImpls = Lucene.Net.Analysis.Tokenattributes.TestSimpleAttributeImpls;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Analysis
 {
+	
     [TestFixture]
-    public class TestToken : LuceneTestCase
-    {
-        [Test]
-        public void TestCtor()
-        {
-            Token t = new Token();
-            char[] content = "hello".ToCharArray();
-            t.SetTermBuffer(content, 0, content.Length);
-            char[] buf = t.TermBuffer();
-            Assert.AreNotSame(t.TermBuffer(), content);
-            Assert.AreEqual("hello", t.Term());
-            Assert.AreEqual("word", t.Type());
-            Assert.AreEqual(0, t.GetFlags());
-
-            t = new Token(6, 22);
-            t.SetTermBuffer(content, 0, content.Length);
-            Assert.AreEqual("hello", t.Term());
-            Assert.AreEqual("(hello,6,22)", t.ToString());
-            Assert.AreEqual("word", t.Type());
-            Assert.AreEqual(0, t.GetFlags());
-
-            t = new Token(6, 22, 7);
-            t.SetTermBuffer(content, 0, content.Length);
-            Assert.AreEqual("hello", t.Term());
-            Assert.AreEqual("(hello,6,22)", t.ToString());
-            Assert.AreEqual(7, t.GetFlags());
-
-            t = new Token(6, 22, "junk");
-            t.SetTermBuffer(content, 0, content.Length);
-            Assert.AreEqual("hello", t.Term());
-            Assert.AreEqual("(hello,6,22,type=junk)", t.ToString());
-            Assert.AreEqual(0, t.GetFlags());
-        }
-
-        [Test]
-        public void TestResize()
-        {
-            Token t = new Token();
-            char[] content = "hello".ToCharArray();
-            t.SetTermBuffer(content, 0, content.Length);
-            for (int i = 0; i < 2000; i++)
-            {
-                t.ResizeTermBuffer(i);
-                Assert.IsTrue(i <= t.TermBuffer().Length);
-                Assert.AreEqual("hello", t.Term());
-            }
-        }
-
-        [Test]
-        public void TestGrow()
-        {
-            Token t = new Token();
-            System.Text.StringBuilder buf = new System.Text.StringBuilder("ab");
-            for (int i = 0; i < 20; i++)
-            {
-                char[] content = buf.ToString().ToCharArray();
-                t.SetTermBuffer(content, 0, content.Length);
-                Assert.AreEqual(buf.Length, t.TermLength());
-                Assert.AreEqual(buf.ToString(), t.Term());
-                buf.Append(buf.ToString());
-            }
-            Assert.AreEqual(1048576, t.TermLength());
-            Assert.AreEqual(1179654, t.TermBuffer().Length);
-
-            // now as a string, first variant
-            t = new Token();
-            buf = new System.Text.StringBuilder("ab");
-            for (int i = 0; i < 20; i++)
-            {
-                String content = buf.ToString();
-                t.SetTermBuffer(content, 0, content.Length);
-                Assert.AreEqual(content.Length, t.TermLength());
-                Assert.AreEqual(content, t.Term());
-                buf.Append(content);
-            }
-            Assert.AreEqual(1048576, t.TermLength());
-            Assert.AreEqual(1179654, t.TermBuffer().Length);
-
-            // now as a string, second variant
-            t = new Token();
-            buf = new System.Text.StringBuilder("ab");
-            for (int i = 0; i < 20; i++)
-            {
-                String content = buf.ToString();
-                t.SetTermBuffer(content);
-                Assert.AreEqual(content.Length, t.TermLength());
-                Assert.AreEqual(content, t.Term());
-                buf.Append(content);
-            }
-            Assert.AreEqual(1048576, t.TermLength());
-            Assert.AreEqual(1179654, t.TermBuffer().Length);
-
-            // Test for slow growth to a long term
-            t = new Token();
-            buf = new System.Text.StringBuilder("a");
-            for (int i = 0; i < 20000; i++)
-            {
-                String content = buf.ToString();
-                t.SetTermBuffer(content);
-                Assert.AreEqual(content.Length, t.TermLength());
-                Assert.AreEqual(content, t.Term());
-                buf.Append("a");
-            }
-            Assert.AreEqual(20000, t.TermLength());
-            Assert.AreEqual(20331, t.TermBuffer().Length);
-
-            // Test for slow growth to a long term
-            t = new Token();
-            buf = new System.Text.StringBuilder("a");
-            for (int i = 0; i < 20000; i++)
-            {
-                String content = buf.ToString();
-                t.SetTermBuffer(content);
-                Assert.AreEqual(content.Length, t.TermLength());
-                Assert.AreEqual(content, t.Term());
-                buf.Append("a");
-            }
-            Assert.AreEqual(20000, t.TermLength());
-            Assert.AreEqual(20331, t.TermBuffer().Length);
-        }
-
-        [Test]
-        public virtual void TestToString()
-        {
-            char[] b = new char[] { 'a', 'l', 'o', 'h', 'a' };
-            Token t = new Token("", 0, 5);
-            t.SetTermBuffer(b, 0, 5);
-            Assert.AreEqual("(aloha,0,5)", t.ToString());
-
-            t.SetTermText("hi there");
-            Assert.AreEqual("(hi there,0,5)", t.ToString());
-        }
-
-        [Test]
-        public virtual void TestMixedStringArray()
-        {
-            Token t = new Token("hello", 0, 5);
-            Assert.AreEqual(t.TermText(), "hello");
-            Assert.AreEqual(t.TermLength(), 5);
-            Assert.AreEqual(new System.String(t.TermBuffer(), 0, 5), "hello");
-            t.SetTermText("hello2");
-            Assert.AreEqual(t.TermLength(), 6);
-            Assert.AreEqual(new System.String(t.TermBuffer(), 0, 6), "hello2");
-            t.SetTermBuffer("hello3".ToCharArray(), 0, 6);
-            Assert.AreEqual(t.TermText(), "hello3");
-
-            // Make sure if we get the buffer and change a character
-            // that termText() reflects the change
-            char[] buffer = t.TermBuffer();
-            buffer[1] = 'o';
-            Assert.AreEqual(t.TermText(), "hollo3");
-        }
-
-        [Test]
-        public void TestClone()
-        {
-            Token t = new Token(0, 5);
-            char[] content = "hello".ToCharArray();
-            t.SetTermBuffer(content, 0, 5);
-            char[] buf = t.TermBuffer();
-            Token copy = (Token)t.Clone();
-            Assert.AreNotSame(buf, copy.TermBuffer());
-        }
-    }
+	// Unit tests for Token: construction, buffer resizing/growth, ToString,
+	// equality, clone and copy-to semantics.
+	//
+	// FIX(review): this revision replaced several Assert.AreNotSame calls with
+	// Assert.AreNotEqual (see the removed lines in this same diff). NUnit's
+	// AreEqual/AreNotEqual compare arrays element-wise, so AreNotEqual fails on a
+	// correct deep clone whose buffer CONTENTS are equal, and it directly
+	// contradicts the AreEqual(pl, ...) assertions on the preceding lines. The
+	// intent of all of these checks is reference identity ("copied, not shared"),
+	// so they are restored to AreNotSame below.
+	public class TestToken:LuceneTestCase
+	{
+		
+		// NOTE(review): NUnit creates [TestFixture] classes through a parameterless
+		// constructor; this name-taking ctor is a leftover of the JUnit port —
+		// confirm LuceneTestCase also exposes a parameterless constructor.
+		public TestToken(System.String name):base(name)
+		{
+		}
+		
+        [Test]
+		public virtual void  TestCtor()
+		{
+			Token t = new Token();
+			char[] content = "hello".ToCharArray();
+			t.SetTermBuffer(content, 0, content.Length);
+			char[] buf = t.TermBuffer();
+			// SetTermBuffer must COPY the chars: the token's internal buffer may not
+			// be the caller's array instance (reference check, hence AreNotSame).
+			Assert.AreNotSame(t.TermBuffer(), content);
+			Assert.AreEqual("hello", t.Term());
+			Assert.AreEqual("word", t.Type());
+			Assert.AreEqual(0, t.GetFlags());
+			
+			t = new Token(6, 22);
+			t.SetTermBuffer(content, 0, content.Length);
+			Assert.AreEqual("hello", t.Term());
+			Assert.AreEqual("(hello,6,22)", t.ToString());
+			Assert.AreEqual("word", t.Type());
+			Assert.AreEqual(0, t.GetFlags());
+			
+			t = new Token(6, 22, 7);
+			t.SetTermBuffer(content, 0, content.Length);
+			Assert.AreEqual("hello", t.Term());
+			Assert.AreEqual("(hello,6,22)", t.ToString());
+			Assert.AreEqual(7, t.GetFlags());
+			
+			t = new Token(6, 22, "junk");
+			t.SetTermBuffer(content, 0, content.Length);
+			Assert.AreEqual("hello", t.Term());
+			Assert.AreEqual("(hello,6,22,type=junk)", t.ToString());
+			Assert.AreEqual(0, t.GetFlags());
+		}
+		
+        [Test]
+		public virtual void  TestResize()
+		{
+			Token t = new Token();
+			char[] content = "hello".ToCharArray();
+			t.SetTermBuffer(content, 0, content.Length);
+			// Resizing the buffer (growing or shrinking requests) must never lose
+			// the term content.
+			for (int i = 0; i < 2000; i++)
+			{
+				t.ResizeTermBuffer(i);
+				Assert.IsTrue(i <= t.TermBuffer().Length);
+				Assert.AreEqual("hello", t.Term());
+			}
+		}
+		
+        [Test]
+		public virtual void  TestGrow()
+		{
+			// Doubling growth via char[]: term doubles each iteration up to 2^20.
+			Token t = new Token();
+			System.Text.StringBuilder buf = new System.Text.StringBuilder("ab");
+			for (int i = 0; i < 20; i++)
+			{
+				char[] content = buf.ToString().ToCharArray();
+				t.SetTermBuffer(content, 0, content.Length);
+				Assert.AreEqual(buf.Length, t.TermLength());
+				Assert.AreEqual(buf.ToString(), t.Term());
+				buf.Append(buf.ToString());
+			}
+			Assert.AreEqual(1048576, t.TermLength());
+			Assert.AreEqual(1179654, t.TermBuffer().Length);
+			
+			// now as a string, first variant
+			t = new Token();
+			buf = new System.Text.StringBuilder("ab");
+			for (int i = 0; i < 20; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content, 0, content.Length);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append(content);
+			}
+			Assert.AreEqual(1048576, t.TermLength());
+			Assert.AreEqual(1179654, t.TermBuffer().Length);
+			
+			// now as a string, second variant
+			t = new Token();
+			buf = new System.Text.StringBuilder("ab");
+			for (int i = 0; i < 20; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append(content);
+			}
+			Assert.AreEqual(1048576, t.TermLength());
+			Assert.AreEqual(1179654, t.TermBuffer().Length);
+			
+			// Test for slow growth to a long term
+			t = new Token();
+			buf = new System.Text.StringBuilder("a");
+			for (int i = 0; i < 20000; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append("a");
+			}
+			Assert.AreEqual(20000, t.TermLength());
+			Assert.AreEqual(20167, t.TermBuffer().Length);
+			
+			// Test for slow growth to a long term
+			t = new Token();
+			buf = new System.Text.StringBuilder("a");
+			for (int i = 0; i < 20000; i++)
+			{
+				System.String content = buf.ToString();
+				t.SetTermBuffer(content);
+				Assert.AreEqual(content.Length, t.TermLength());
+				Assert.AreEqual(content, t.Term());
+				buf.Append("a");
+			}
+			Assert.AreEqual(20000, t.TermLength());
+			Assert.AreEqual(20167, t.TermBuffer().Length);
+		}
+		
+        [Test]
+		public virtual void  TestToString()
+		{
+			char[] b = new char[]{'a', 'l', 'o', 'h', 'a'};
+			Token t = new Token("", 0, 5);
+			t.SetTermBuffer(b, 0, 5);
+			Assert.AreEqual("(aloha,0,5)", t.ToString());
+			
+			t.SetTermText("hi there");
+			Assert.AreEqual("(hi there,0,5)", t.ToString());
+		}
+		
+        [Test]
+		public virtual void  TestTermBufferEquals()
+		{
+			// Token equality is based on the term buffer contents, not identity.
+			Token t1a = new Token();
+			char[] content1a = "hello".ToCharArray();
+			t1a.SetTermBuffer(content1a, 0, 5);
+			Token t1b = new Token();
+			char[] content1b = "hello".ToCharArray();
+			t1b.SetTermBuffer(content1b, 0, 5);
+			Token t2 = new Token();
+			char[] content2 = "hello2".ToCharArray();
+			t2.SetTermBuffer(content2, 0, 6);
+			Assert.IsTrue(t1a.Equals(t1b));
+			Assert.IsFalse(t1a.Equals(t2));
+			Assert.IsFalse(t2.Equals(t1b));
+		}
+		
+        [Test]
+		public virtual void  TestMixedStringArray()
+		{
+			Token t = new Token("hello", 0, 5);
+			Assert.AreEqual(t.TermText(), "hello");
+			Assert.AreEqual(t.TermLength(), 5);
+			Assert.AreEqual(t.Term(), "hello");
+			t.SetTermText("hello2");
+			Assert.AreEqual(t.TermLength(), 6);
+			Assert.AreEqual(t.Term(), "hello2");
+			t.SetTermBuffer("hello3".ToCharArray(), 0, 6);
+			Assert.AreEqual(t.TermText(), "hello3");
+			
+			// Make sure if we get the buffer and change a character
+			// that termText() reflects the change
+			char[] buffer = t.TermBuffer();
+			buffer[1] = 'o';
+			Assert.AreEqual(t.TermText(), "hollo3");
+		}
+		
+        [Test]
+		public virtual void  TestClone()
+		{
+			Token t = new Token(0, 5);
+			char[] content = "hello".ToCharArray();
+			t.SetTermBuffer(content, 0, 5);
+			char[] buf = t.TermBuffer();
+			Token copy = (Token) TestSimpleAttributeImpls.AssertCloneIsEqual(t);
+			Assert.AreEqual(t.Term(), copy.Term());
+			// Deep clone: same contents, distinct buffer instance. AreNotEqual would
+			// compare the arrays element-wise and fail on a correct clone, so this
+			// must be a reference check.
+			Assert.AreNotSame(buf, copy.TermBuffer());
+			
+			Payload pl = new Payload(new byte[]{1, 2, 3, 4});
+			t.SetPayload(pl);
+			copy = (Token) TestSimpleAttributeImpls.AssertCloneIsEqual(t);
+			Assert.AreEqual(pl, copy.GetPayload());
+			// Equal by value but a distinct Payload instance.
+			Assert.AreNotSame(pl, copy.GetPayload());
+		}
+		
+        [Test]
+		public virtual void  TestCopyTo()
+		{
+			Token t = new Token();
+			Token copy = (Token) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
+			Assert.AreEqual("", t.Term());
+			Assert.AreEqual("", copy.Term());
+			
+			t = new Token(0, 5);
+			char[] content = "hello".ToCharArray();
+			t.SetTermBuffer(content, 0, 5);
+			char[] buf = t.TermBuffer();
+			copy = (Token) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
+			Assert.AreEqual(t.Term(), copy.Term());
+			// CopyTo must copy the buffer, not share it (reference check, as above).
+			Assert.AreNotSame(buf, copy.TermBuffer());
+			
+			Payload pl = new Payload(new byte[]{1, 2, 3, 4});
+			t.SetPayload(pl);
+			copy = (Token) TestSimpleAttributeImpls.AssertCopyIsEqual(t);
+			Assert.AreEqual(pl, copy.GetPayload());
+			// Equal by value but a distinct Payload instance.
+			Assert.AreNotSame(pl, copy.GetPayload());
+		}
+	}
 }
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTokenStreamBWComp.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/TestTokenStreamBWComp.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTokenStreamBWComp.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/TestTokenStreamBWComp.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,418 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Lucene.Net.Analysis.Tokenattributes;
+using Payload = Lucene.Net.Index.Payload;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Analysis
+{
+	
+	/// <summary>This class tests some special cases of backwards compatibility when using the new TokenStream API with old analyzers </summary>
+    [TestFixture]
+	public class TestTokenStreamBWComp:LuceneTestCase
+	{
+		// TokenFilter that overrides none of IncrementToken()/Next(Token)/Next();
+		// used by TestOverridesAny to check that such a filter is rejected.
+		private class AnonymousClassTokenFilter:TokenFilter
+		{
+			private void  InitBlock(TestTokenStreamBWComp enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTokenStreamBWComp enclosingInstance;
+			public TestTokenStreamBWComp Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			internal AnonymousClassTokenFilter(TestTokenStreamBWComp enclosingInstance, Lucene.Net.Analysis.TokenStream Param1):base(Param1)
+			{
+				InitBlock(enclosingInstance);
+			}
+			// we implement nothing, only un-abstract it
+		}
+		
+		private System.String doc = "This is the new TokenStream api";
+		private System.String[] stopwords = new System.String[]{"is", "the", "this"};
+		
+		// Token subclass carrying an extra part-of-speech field; exercises that
+		// custom Token subclasses survive the old/new API bridging.
+		[Serializable]
+		public class POSToken:Token
+		{
+			public const int PROPERNOUN = 1;
+			public const int NO_NOUN = 2;
+			
+			private int partOfSpeech;
+			
+			public virtual void  SetPartOfSpeech(int pos)
+			{
+				partOfSpeech = pos;
+			}
+			
+			public virtual int GetPartOfSpeech()
+			{
+				return this.partOfSpeech;
+			}
+		}
+		
+		// Old-API filter (overrides Next()) that replaces each token with a
+		// POSToken tagged PROPERNOUN when capitalized, NO_NOUN otherwise.
+		internal class PartOfSpeechTaggingFilter:TokenFilter
+		{
+			
+			protected internal PartOfSpeechTaggingFilter(TokenStream input):base(input)
+			{
+			}
+			
+			public override Token Next()
+			{
+				Token t = input.Next();
+				if (t == null)
+					return null;
+				
+				POSToken pt = new POSToken();
+				pt.Reinit(t);
+				if (pt.TermLength() > 0)
+				{
+					if (System.Char.IsUpper(pt.TermBuffer()[0]))
+					{
+						pt.SetPartOfSpeech(Lucene.Net.Analysis.TestTokenStreamBWComp.POSToken.PROPERNOUN);
+					}
+					else
+					{
+						pt.SetPartOfSpeech(Lucene.Net.Analysis.TestTokenStreamBWComp.POSToken.NO_NOUN);
+					}
+				}
+				return pt;
+			}
+		}
+		
+		// Old-API filter that attaches a payload to tokens tagged PROPERNOUN; if
+		// the POSToken subclass was lost somewhere in the chain, no payload is set
+		// and the consumers below detect it.
+		internal class PartOfSpeechAnnotatingFilter:TokenFilter
+		{
+			public const byte PROPER_NOUN_ANNOTATION = 1;
+			
+			
+			protected internal PartOfSpeechAnnotatingFilter(TokenStream input):base(input)
+			{
+			}
+			
+			public override Token Next()
+			{
+				Token t = input.Next();
+				if (t == null)
+					return null;
+				
+				if (t is POSToken)
+				{
+					POSToken pt = (POSToken) t;
+					if (pt.GetPartOfSpeech() == Lucene.Net.Analysis.TestTokenStreamBWComp.POSToken.PROPERNOUN)
+					{
+						pt.SetPayload(new Payload(new byte[]{PROPER_NOUN_ANNOTATION}));
+					}
+					return pt;
+				}
+				else
+				{
+					return t;
+				}
+			}
+		}
+		
+		// test the chain: The one and only term "TokenStream" should be declared as proper noun:
+		
+        [Test]
+		public virtual void  TestTeeSinkCustomTokenNewAPI()
+		{
+			TestTeeSinkCustomToken(0);
+		}
+		
+        [Test]
+		public virtual void  TestTeeSinkCustomTokenOldAPI()
+		{
+			TestTeeSinkCustomToken(1);
+		}
+		
+        [Test]
+		public virtual void  TestTeeSinkCustomTokenVeryOldAPI()
+		{
+			TestTeeSinkCustomToken(2);
+		}
+		
+		private void  TestTeeSinkCustomToken(int api)
+		{
+			TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader(doc));
+			stream = new PartOfSpeechTaggingFilter(stream);
+			stream = new LowerCaseFilter(stream);
+			stream = new StopFilter(stream, stopwords);
+			
+			SinkTokenizer sink = new SinkTokenizer();
+			TokenStream stream1 = new PartOfSpeechAnnotatingFilter(sink);
+			
+			stream = new TeeTokenFilter(stream, sink);
+			stream = new PartOfSpeechAnnotatingFilter(stream);
+			
+			switch (api)
+			{
+				
+				case 0: 
+					ConsumeStreamNewAPI(stream);
+					ConsumeStreamNewAPI(stream1);
+					break;
+				
+				case 1: 
+					ConsumeStreamOldAPI(stream);
+					ConsumeStreamOldAPI(stream1);
+					break;
+				
+				case 2: 
+					ConsumeStreamVeryOldAPI(stream);
+					ConsumeStreamVeryOldAPI(stream1);
+					break;
+				}
+		}
+		
+		// test caching the special custom POSToken works in all cases
+		
+		// FIX(review): these four tests previously called TestTeeSinkCustomToken
+		// (copy/paste slip), leaving the private TestCachingCustomToken driver
+		// unused and making the "Mixed" variant a silent no-op (api==3 matches no
+		// case in TestTeeSinkCustomToken's switch). They now call the caching
+		// driver they were written for.
+        [Test]
+		public virtual void  TestCachingCustomTokenNewAPI()
+		{
+			TestCachingCustomToken(0);
+		}
+		
+        [Test]
+		public virtual void  TestCachingCustomTokenOldAPI()
+		{
+			TestCachingCustomToken(1);
+		}
+		
+        [Test]
+		public virtual void  TestCachingCustomTokenVeryOldAPI()
+		{
+			TestCachingCustomToken(2);
+		}
+		
+        [Test]
+		public virtual void  TestCachingCustomTokenMixed()
+		{
+			TestCachingCustomToken(3);
+		}
+		
+		private void  TestCachingCustomToken(int api)
+		{
+			TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader(doc));
+			stream = new PartOfSpeechTaggingFilter(stream);
+			stream = new LowerCaseFilter(stream);
+			stream = new StopFilter(stream, stopwords);
+			stream = new CachingTokenFilter(stream); // <- the caching is done before the annotating!
+			stream = new PartOfSpeechAnnotatingFilter(stream);
+			
+			switch (api)
+			{
+				
+				case 0: 
+					ConsumeStreamNewAPI(stream);
+					ConsumeStreamNewAPI(stream);
+					break;
+				
+				case 1: 
+					ConsumeStreamOldAPI(stream);
+					ConsumeStreamOldAPI(stream);
+					break;
+				
+				case 2: 
+					ConsumeStreamVeryOldAPI(stream);
+					ConsumeStreamVeryOldAPI(stream);
+					break;
+				
+				case 3: 
+					ConsumeStreamNewAPI(stream);
+					ConsumeStreamOldAPI(stream);
+					ConsumeStreamVeryOldAPI(stream);
+					ConsumeStreamNewAPI(stream);
+					ConsumeStreamVeryOldAPI(stream);
+					break;
+				}
+		}
+		
+		// Consume via the new attribute-based API; only "tokenstream" may carry
+		// the proper-noun payload.
+		private static void  ConsumeStreamNewAPI(TokenStream stream)
+		{
+			stream.Reset();
+			PayloadAttribute payloadAtt = (PayloadAttribute) stream.AddAttribute(typeof(PayloadAttribute));
+			TermAttribute termAtt = (TermAttribute) stream.AddAttribute(typeof(TermAttribute));
+			
+			while (stream.IncrementToken())
+			{
+				System.String term = termAtt.Term();
+				Payload p = payloadAtt.GetPayload();
+				if (p != null && p.GetData().Length == 1 && p.GetData()[0] == PartOfSpeechAnnotatingFilter.PROPER_NOUN_ANNOTATION)
+				{
+					Assert.IsTrue("tokenstream".Equals(term), "only TokenStream is a proper noun");
+				}
+				else
+				{
+					Assert.IsFalse("tokenstream".Equals(term), "all other tokens (if this test fails, the special POSToken subclass is not correctly passed through the chain)");
+				}
+			}
+		}
+		
+		// Same check via the reusable-token API (Next(Token)).
+		private static void  ConsumeStreamOldAPI(TokenStream stream)
+		{
+			stream.Reset();
+			Token reusableToken = new Token();
+			
+			while ((reusableToken = stream.Next(reusableToken)) != null)
+			{
+				System.String term = reusableToken.Term();
+				Payload p = reusableToken.GetPayload();
+				if (p != null && p.GetData().Length == 1 && p.GetData()[0] == PartOfSpeechAnnotatingFilter.PROPER_NOUN_ANNOTATION)
+				{
+					Assert.IsTrue("tokenstream".Equals(term), "only TokenStream is a proper noun");
+				}
+				else
+				{
+					Assert.IsFalse("tokenstream".Equals(term), "all other tokens (if this test fails, the special POSToken subclass is not correctly passed through the chain)");
+				}
+			}
+		}
+		
+		// Same check via the oldest API (Next() allocating a token per call).
+		private static void  ConsumeStreamVeryOldAPI(TokenStream stream)
+		{
+			stream.Reset();
+			
+			Token token;
+			while ((token = stream.Next()) != null)
+			{
+				System.String term = token.Term();
+				Payload p = token.GetPayload();
+				if (p != null && p.GetData().Length == 1 && p.GetData()[0] == PartOfSpeechAnnotatingFilter.PROPER_NOUN_ANNOTATION)
+				{
+					Assert.IsTrue("tokenstream".Equals(term), "only TokenStream is a proper noun");
+				}
+				else
+				{
+					Assert.IsFalse("tokenstream".Equals(term), "all other tokens (if this test fails, the special POSToken subclass is not correctly passed through the chain)");
+				}
+			}
+		}
+		
+		// test if tokenization fails, if only the new API is allowed and an old TokenStream is in the chain
+        [Test]
+		public virtual void  TestOnlyNewAPI()
+		{
+			TokenStream.SetOnlyUseNewAPI(true);
+			try
+			{
+				
+				// this should fail with UOE
+				try
+				{
+					TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader(doc));
+					stream = new PartOfSpeechTaggingFilter(stream); // <-- this one is evil!
+					stream = new LowerCaseFilter(stream);
+					stream = new StopFilter(stream, stopwords);
+					while (stream.IncrementToken())
+						;
+					Assert.Fail("If only the new API is allowed, this should fail with an UOE");
+				}
+				catch (System.NotSupportedException uoe)
+				{
+					Assert.IsTrue((typeof(PartOfSpeechTaggingFilter).FullName + " does not implement incrementToken() which is needed for onlyUseNewAPI.").Equals(uoe.Message));
+				}
+				
+				// this should pass, as all core token streams support the new API
+				TokenStream stream2 = new WhitespaceTokenizer(new System.IO.StringReader(doc));
+				stream2 = new LowerCaseFilter(stream2);
+				stream2 = new StopFilter(stream2, stopwords);
+				while (stream2.IncrementToken())
+					;
+				
+				// Test, if all attributes are implemented by their implementation, not Token/TokenWrapper
+				Assert.IsTrue(stream2.AddAttribute(typeof(TermAttribute)) is TermAttributeImpl, "TermAttribute is implemented by TermAttributeImpl");
+				Assert.IsTrue(stream2.AddAttribute(typeof(OffsetAttribute)) is OffsetAttributeImpl, "OffsetAttribute is implemented by OffsetAttributeImpl");
+				Assert.IsTrue(stream2.AddAttribute(typeof(Lucene.Net.Analysis.Tokenattributes.FlagsAttribute)) is FlagsAttributeImpl, "FlagsAttribute is implemented by FlagsAttributeImpl");
+				Assert.IsTrue(stream2.AddAttribute(typeof(PayloadAttribute)) is PayloadAttributeImpl, "PayloadAttribute is implemented by PayloadAttributeImpl");
+				Assert.IsTrue(stream2.AddAttribute(typeof(PositionIncrementAttribute)) is PositionIncrementAttributeImpl, "PositionIncrementAttribute is implemented by PositionIncrementAttributeImpl");
+				Assert.IsTrue(stream2.AddAttribute(typeof(TypeAttribute)) is TypeAttributeImpl, "TypeAttribute is implemented by TypeAttributeImpl");
+				
+				// try to call old API, this should fail
+				try
+				{
+					stream2.Reset();
+					Token reusableToken = new Token();
+					while ((reusableToken = stream2.Next(reusableToken)) != null)
+						;
+					Assert.Fail("If only the new API is allowed, this should fail with an UOE");
+				}
+				catch (System.NotSupportedException uoe)
+				{
+					Assert.IsTrue("This TokenStream only supports the new Attributes API.".Equals(uoe.Message));
+				}
+				try
+				{
+					stream2.Reset();
+					while (stream2.Next() != null)
+						;
+					Assert.Fail("If only the new API is allowed, this should fail with an UOE");
+				}
+				catch (System.NotSupportedException uoe)
+				{
+					Assert.IsTrue("This TokenStream only supports the new Attributes API.".Equals(uoe.Message));
+				}
+				
+				// Test if the wrapper API (onlyUseNewAPI==false) uses TokenWrapper
+				// as attribute instance.
+				// TokenWrapper encapsulates a Token instance that can be exchanged
+				// by another Token instance without changing the AttributeImpl instance
+				// itsself.
+				TokenStream.SetOnlyUseNewAPI(false);
+				stream2 = new WhitespaceTokenizer(new System.IO.StringReader(doc));
+				Assert.IsTrue(stream2.AddAttribute(typeof(TermAttribute)) is TokenWrapper, "TermAttribute is implemented by TokenWrapper");
+				Assert.IsTrue(stream2.AddAttribute(typeof(OffsetAttribute)) is TokenWrapper, "OffsetAttribute is implemented by TokenWrapper");
+				Assert.IsTrue(stream2.AddAttribute(typeof(Lucene.Net.Analysis.Tokenattributes.FlagsAttribute)) is TokenWrapper, "FlagsAttribute is implemented by TokenWrapper");
+				Assert.IsTrue(stream2.AddAttribute(typeof(PayloadAttribute)) is TokenWrapper, "PayloadAttribute is implemented by TokenWrapper");
+				Assert.IsTrue(stream2.AddAttribute(typeof(PositionIncrementAttribute)) is TokenWrapper, "PositionIncrementAttribute is implemented by TokenWrapper");
+				Assert.IsTrue(stream2.AddAttribute(typeof(TypeAttribute)) is TokenWrapper, "TypeAttribute is implemented by TokenWrapper");
+			}
+			finally
+			{
+				// Always restore the global flag so other fixtures are unaffected.
+				TokenStream.SetOnlyUseNewAPI(false);
+			}
+		}
+		
+        [Test]
+		public virtual void  TestOverridesAny()
+		{
+			try
+			{
+				TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader(doc));
+				stream = new AnonymousClassTokenFilter(this, stream);
+				stream = new LowerCaseFilter(stream);
+				stream = new StopFilter(stream, stopwords);
+				while (stream.IncrementToken())
+					;
+				Assert.Fail("One TokenFilter does not override any of the required methods, so it should fail.");
+			}
+			catch (System.NotSupportedException uoe)
+			{
+				Assert.IsTrue(uoe.Message.EndsWith("does not implement any of incrementToken(), next(Token), next()."));
+			}
+		}
+	}
+}
\ No newline at end of file

Added: incubator/lucene.net/trunk/C#/src/Test/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Analysis/Tokenattributes/TestSimpleAttributeImpls.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,155 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using Payload = Lucene.Net.Index.Payload;
+using AttributeImpl = Lucene.Net.Util.AttributeImpl;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Analysis.Tokenattributes
+{
+	
+	/// <summary> Tests the simple AttributeImpl classes (flags, position increment,
+	/// type, payload, offset): default values after construction and Clear(),
+	/// ToString() formatting, and value-equality of clones and copies.
+	/// </summary>
+    [TestFixture]
+	public class TestSimpleAttributeImpls:LuceneTestCase
+	{
+		
+		// NOTE(review): NUnit ordinarily requires a parameterless constructor for
+		// [TestFixture] classes — confirm the LuceneTestCase base/NUnit version in
+		// use tolerates this name-taking constructor.
+		public TestSimpleAttributeImpls(System.String name):base(name)
+		{
+		}
+		
+        [Test]
+		public virtual void  TestFlagsAttribute()
+		{
+			FlagsAttributeImpl att = new FlagsAttributeImpl();
+			// default flags value is 0
+			Assert.AreEqual(0, att.GetFlags());
+			
+			att.SetFlags(1234);
+			Assert.AreEqual("flags=1234", att.ToString());
+			
+			FlagsAttributeImpl att2 = (FlagsAttributeImpl) AssertCloneIsEqual(att);
+			Assert.AreEqual(1234, att2.GetFlags());
+			
+			att2 = (FlagsAttributeImpl) AssertCopyIsEqual(att);
+			Assert.AreEqual(1234, att2.GetFlags());
+			
+			// Clear() must restore the default
+			att.Clear();
+			Assert.AreEqual(0, att.GetFlags());
+		}
+		
+        [Test]
+		public virtual void  TestPositionIncrementAttribute()
+		{
+			PositionIncrementAttributeImpl att = new PositionIncrementAttributeImpl();
+			// default position increment is 1, not 0
+			Assert.AreEqual(1, att.GetPositionIncrement());
+			
+			att.SetPositionIncrement(1234);
+			Assert.AreEqual("positionIncrement=1234", att.ToString());
+			
+			PositionIncrementAttributeImpl att2 = (PositionIncrementAttributeImpl) AssertCloneIsEqual(att);
+			Assert.AreEqual(1234, att2.GetPositionIncrement());
+			
+			att2 = (PositionIncrementAttributeImpl) AssertCopyIsEqual(att);
+			Assert.AreEqual(1234, att2.GetPositionIncrement());
+			
+			// Clear() must restore the default (1)
+			att.Clear();
+			Assert.AreEqual(1, att.GetPositionIncrement());
+		}
+		
+        [Test]
+		public virtual void  TestTypeAttribute()
+		{
+			TypeAttributeImpl att = new TypeAttributeImpl();
+			Assert.AreEqual(TypeAttributeImpl.DEFAULT_TYPE, att.Type());
+			
+			att.SetType("hallo");
+			Assert.AreEqual("type=hallo", att.ToString());
+			
+			TypeAttributeImpl att2 = (TypeAttributeImpl) AssertCloneIsEqual(att);
+			Assert.AreEqual("hallo", att2.Type());
+			
+			att2 = (TypeAttributeImpl) AssertCopyIsEqual(att);
+			Assert.AreEqual("hallo", att2.Type());
+			
+			// Clear() must restore the default type
+			att.Clear();
+			Assert.AreEqual(TypeAttributeImpl.DEFAULT_TYPE, att.Type());
+		}
+		
+        [Test]
+		public virtual void  TestPayloadAttribute()
+		{
+			PayloadAttributeImpl att = new PayloadAttributeImpl();
+			Assert.IsNull(att.GetPayload());
+			
+			Payload pl = new Payload(new byte[]{1, 2, 3, 4});
+			att.SetPayload(pl);
+			
+			// The clone's payload must be equal in value but a distinct instance.
+			// The original port asserted AreNotEqual here, which directly
+			// contradicts the AreEqual on the previous line (the Java test uses
+			// assertNotSame, i.e. a reference-identity check).
+			PayloadAttributeImpl att2 = (PayloadAttributeImpl) AssertCloneIsEqual(att);
+			Assert.AreEqual(pl, att2.GetPayload());
+			Assert.AreNotSame(pl, att2.GetPayload());
+			
+			att2 = (PayloadAttributeImpl) AssertCopyIsEqual(att);
+			Assert.AreEqual(pl, att2.GetPayload());
+			Assert.AreNotSame(pl, att2.GetPayload());
+			
+			// Clear() must drop the payload
+			att.Clear();
+			Assert.IsNull(att.GetPayload());
+		}
+		
+        [Test]
+		public virtual void  TestOffsetAttribute()
+		{
+			OffsetAttributeImpl att = new OffsetAttributeImpl();
+			Assert.AreEqual(0, att.StartOffset());
+			Assert.AreEqual(0, att.EndOffset());
+			
+			att.SetOffset(12, 34);
+			// no string test here, because order unknown
+			
+			OffsetAttributeImpl att2 = (OffsetAttributeImpl) AssertCloneIsEqual(att);
+			Assert.AreEqual(12, att2.StartOffset());
+			Assert.AreEqual(34, att2.EndOffset());
+			
+			att2 = (OffsetAttributeImpl) AssertCopyIsEqual(att);
+			Assert.AreEqual(12, att2.StartOffset());
+			Assert.AreEqual(34, att2.EndOffset());
+			
+			// Clear() must zero both offsets
+			att.Clear();
+			Assert.AreEqual(0, att.StartOffset());
+			Assert.AreEqual(0, att.EndOffset());
+		}
+		
+		/// <summary>Clones <c>att</c> and asserts the clone is value-equal with the
+		/// same hash code; returns the clone for further checks.</summary>
+		public static AttributeImpl AssertCloneIsEqual(AttributeImpl att)
+		{
+			AttributeImpl clone = (AttributeImpl) att.Clone();
+			Assert.AreEqual(att, clone, "Clone must be equal");
+			Assert.AreEqual(att.GetHashCode(), clone.GetHashCode(), "Clone's hashcode must be equal");
+			return clone;
+		}
+		
+		/// <summary>Creates a fresh instance of <c>att</c>'s type via its
+		/// parameterless constructor, copies <c>att</c> into it with CopyTo, and
+		/// asserts value-equality and matching hash codes; returns the copy.</summary>
+		public static AttributeImpl AssertCopyIsEqual(AttributeImpl att)
+		{
+			AttributeImpl copy = (AttributeImpl) System.Activator.CreateInstance(att.GetType());
+			att.CopyTo(copy);
+			Assert.AreEqual(att, copy, "Copied instance must be equal");
+			Assert.AreEqual(att.GetHashCode(), copy.GetHashCode(), "Copied instance's hashcode must be equal");
+			return copy;
+		}
+	}
+}
\ No newline at end of file



Mime
View raw message