lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ccurr...@apache.org
Subject [Lucene.Net] svn commit: r1201314 - /incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/
Date Sat, 12 Nov 2011 20:01:56 GMT
Author: ccurrens
Date: Sat Nov 12 20:01:55 2011
New Revision: 1201314

URL: http://svn.apache.org/viewvc?rev=1201314&view=rev
Log:
Updated tests in core\analysis and core\analysis\tokenattributes

Modified:
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestASCIIFoldingFilter.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestAnalyzers.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCachingTokenFilter.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCharArraySet.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestISOLatin1AccentFilter.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestKeywordAnalyzer.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestLengthFilter.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestNumericTokenStream.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestPerFieldAnalzyerWrapper.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStandardAnalyzer.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopAnalyzer.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopFilter.cs
    incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestToken.cs

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/BaseTokenStreamTestCase.cs
Sat Nov 12 20:01:55 2011
@@ -25,79 +25,16 @@ using LuceneTestCase = Lucene.Net.Util.L
 namespace Lucene.Net.Analysis
 {
 	
-	/// <summary> Base class for all Lucene unit tests that use TokenStreams.  
-	/// <p/>
-	/// This class runs all tests twice, one time with {@link TokenStream#setOnlyUseNewAPI} <code>false</code>
-	/// and after that one time with <code>true</code>.
-	/// </summary>
+	/// <summary>Base class for all Lucene unit tests that use TokenStreams.</summary>
 	public abstract class BaseTokenStreamTestCase:LuceneTestCase
 	{
-		
-		private bool onlyUseNewAPI = false;
-		private System.Collections.Hashtable testWithNewAPI;
-		
-		public BaseTokenStreamTestCase():base()
-		{
-			this.testWithNewAPI = null; // run all tests also with onlyUseNewAPI
-		}
-		
+	    public BaseTokenStreamTestCase()
+	    { }
+
 		public BaseTokenStreamTestCase(System.String name):base(name)
-		{
-			this.testWithNewAPI = null; // run all tests also with onlyUseNewAPI
-		}
-		
-		public BaseTokenStreamTestCase(System.Collections.Hashtable testWithNewAPI):base()
-		{
-			this.testWithNewAPI = testWithNewAPI;
-		}
-		
-		public BaseTokenStreamTestCase(System.String name, System.Collections.Hashtable testWithNewAPI):base(name)
-		{
-			this.testWithNewAPI = testWithNewAPI;
-		}
-		
-		// @Override
-		[SetUp]
-		public override void  SetUp()
-		{
-			base.SetUp();
-			TokenStream.SetOnlyUseNewAPI(onlyUseNewAPI);
-		}
-		
-		// @Override
-		public override void  RunBare()
-		{
-			// Do the test with onlyUseNewAPI=false (default)
-			try
-			{
-				onlyUseNewAPI = false;
-				// base.RunBare();  // {{Aroush-2.9}}
-                System.Diagnostics.Debug.Fail("Port issue:", "base.RunBare()"); // {{Aroush-2.9}}
-			}
-			catch (System.Exception e)
-			{
-				System.Console.Out.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=false");
-				throw e;
-			}
-			
-			if (testWithNewAPI == null || testWithNewAPI.Contains(GetType()))
-			{
-				// Do the test again with onlyUseNewAPI=true
-				try
-				{
-					onlyUseNewAPI = true;
-					base.RunBare();
-				}
-				catch (System.Exception e)
-				{
-					System.Console.Out.WriteLine("Test failure of '" + GetType() + "' occurred with onlyUseNewAPI=true");
-					throw e;
-				}
-			}
-		}
+		{ }
 		
 		// some helpers to test Analyzers and TokenStreams:
-
         public interface CheckClearAttributesAttribute : Lucene.Net.Util.Attribute
         {
                bool GetAndResetClearCalled();
@@ -119,13 +56,11 @@ namespace Lucene.Net.Analysis
                 }
             }
 
-            //@Override
             public override void Clear()
             {
                 clearCalled = true;
             }
 
-            //@Override
             public  override bool Equals(Object other) 
             {
                 return (
@@ -134,15 +69,12 @@ namespace Lucene.Net.Analysis
                 );
             }
 
-
-            //@Override
             public override int GetHashCode()
             {
                 //Java: return 76137213 ^ Boolean.valueOf(clearCalled).hashCode();
                 return 76137213 ^ clearCalled.GetHashCode();
             }
 
-            //@Override
             public override void CopyTo(Lucene.Net.Util.AttributeImpl target)
             {
                 ((CheckClearAttributesAttributeImpl)target).Clear();
@@ -152,30 +84,30 @@ namespace Lucene.Net.Analysis
         public static void AssertTokenStreamContents(TokenStream ts, System.String[] output, int[] startOffsets, int[] endOffsets, System.String[] types, int[] posIncrements, int? finalOffset)
         {
             Assert.IsNotNull(output);
-            CheckClearAttributesAttribute checkClearAtt = (CheckClearAttributesAttribute)ts.AddAttribute(typeof(CheckClearAttributesAttribute));
+            CheckClearAttributesAttribute checkClearAtt = ts.AddAttribute<CheckClearAttributesAttribute>();
 
-            Assert.IsTrue(ts.HasAttribute(typeof(TermAttribute)), "has no TermAttribute");
-            TermAttribute termAtt = (TermAttribute)ts.GetAttribute(typeof(TermAttribute));
+            Assert.IsTrue(ts.HasAttribute<TermAttribute>(), "has no TermAttribute");
+            TermAttribute termAtt = ts.GetAttribute<TermAttribute>();
 
             OffsetAttribute offsetAtt = null;
             if (startOffsets != null || endOffsets != null || finalOffset != null)
             {
-                Assert.IsTrue(ts.HasAttribute(typeof(OffsetAttribute)), "has no OffsetAttribute");
-                offsetAtt = (OffsetAttribute)ts.GetAttribute(typeof(OffsetAttribute));
+                Assert.IsTrue(ts.HasAttribute<OffsetAttribute>(), "has no OffsetAttribute");
+                offsetAtt = ts.GetAttribute<OffsetAttribute>();
             }
     
             TypeAttribute typeAtt = null;
             if (types != null)
             {
-                Assert.IsTrue(ts.HasAttribute(typeof(TypeAttribute)), "has no TypeAttribute");
-                typeAtt = (TypeAttribute)ts.GetAttribute(typeof(TypeAttribute));
+                Assert.IsTrue(ts.HasAttribute<TypeAttribute>(), "has no TypeAttribute");
+                typeAtt = ts.GetAttribute<TypeAttribute>();
             }
             
             PositionIncrementAttribute posIncrAtt = null;
             if (posIncrements != null)
             {
-                Assert.IsTrue(ts.HasAttribute(typeof(PositionIncrementAttribute)), "has no PositionIncrementAttribute");
-                posIncrAtt = (PositionIncrementAttribute)ts.GetAttribute(typeof(PositionIncrementAttribute));
+                Assert.IsTrue(ts.HasAttribute<PositionIncrementAttribute>(), "has no PositionIncrementAttribute");
+                posIncrAtt = ts.GetAttribute<PositionIncrementAttribute>();
             }
 
             ts.Reset();

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestASCIIFoldingFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestASCIIFoldingFilter.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestASCIIFoldingFilter.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestASCIIFoldingFilter.cs
Sat Nov 12 20:01:55 2011
@@ -25,7 +25,7 @@ namespace Lucene.Net.Analysis
 {
 	
     [TestFixture]
-	public class TestASCIIFoldingFilter:BaseTokenStreamTestCase
+	public class TestASCIIFoldingFilter : BaseTokenStreamTestCase
 	{
 		
 		// testLain1Accents() is a copy of TestLatin1AccentFilter.testU().
@@ -38,8 +38,8 @@ namespace Lucene.Net.Analysis
                  " ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl")
                 );
 			ASCIIFoldingFilter filter = new ASCIIFoldingFilter(stream);
-			
-			TermAttribute termAtt = (TermAttribute) filter.GetAttribute(typeof(TermAttribute));
+
+            TermAttribute termAtt = filter.GetAttribute<TermAttribute>();
 			
 			AssertTermEquals("Des", filter, termAtt);
 			AssertTermEquals("mot", filter, termAtt);
@@ -1902,7 +1902,7 @@ namespace Lucene.Net.Analysis
 			
 			TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader(inputText.ToString()));
 			ASCIIFoldingFilter filter = new ASCIIFoldingFilter(stream);
-			TermAttribute termAtt = (TermAttribute) filter.GetAttribute(typeof(TermAttribute));
+            TermAttribute termAtt = filter.GetAttribute<TermAttribute>();
 			System.Collections.IEnumerator expectedIter = expectedOutputTokens.GetEnumerator();
 			while (expectedIter.MoveNext())
 			{

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestAnalyzers.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestAnalyzers.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestAnalyzers.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestAnalyzers.cs
Sat Nov 12 20:01:55 2011
@@ -24,17 +24,18 @@ using StandardTokenizer = Lucene.Net.Ana
 using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
 using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using Payload = Lucene.Net.Index.Payload;
+using Version = Lucene.Net.Util.Version;
 
 namespace Lucene.Net.Analysis
 {
 	
 	[TestFixture]
-	public class TestAnalyzers:BaseTokenStreamTestCase
+	public class TestAnalyzers : BaseTokenStreamTestCase
 	{
-		
-		/*public TestAnalyzers(System.String name):base(name)
+        // NOTE: This was commented out before -cc
+		public TestAnalyzers(System.String name) : base(name)
 		{
-		}*/
+		}
 		
 		[Test]
 		public virtual void  TestSimple()
@@ -67,14 +68,14 @@ namespace Lucene.Net.Analysis
 		[Test]
 		public virtual void  TestStop()
 		{
-			Analyzer a = new StopAnalyzer();
+			Analyzer a = new StopAnalyzer(Version.LUCENE_CURRENT);
 			AssertAnalyzesTo(a, "foo bar FOO BAR", new System.String[]{"foo", "bar", "foo", "bar"});
 			AssertAnalyzesTo(a, "foo a bar such FOO THESE BAR", new System.String[]{"foo", "bar", "foo", "bar"});
 		}
 		
 		internal virtual void  VerifyPayload(TokenStream ts)
 		{
-			PayloadAttribute payloadAtt = (PayloadAttribute) ts.GetAttribute(typeof(PayloadAttribute));
+            PayloadAttribute payloadAtt = ts.GetAttribute<PayloadAttribute>();
 			for (byte b = 1; ; b++)
 			{
 				bool hasNext = ts.IncrementToken();
@@ -118,6 +119,10 @@ namespace Lucene.Net.Analysis
 		
 		private class MyStandardAnalyzer:StandardAnalyzer
 		{
+		    public MyStandardAnalyzer() : base(Version.LUCENE_CURRENT)
+		    {
+		        
+		    }
 			public override TokenStream TokenStream(System.String field, System.IO.TextReader reader)
 			{
 				return new WhitespaceAnalyzer().TokenStream(field, reader);
@@ -146,7 +151,7 @@ namespace Lucene.Net.Analysis
 		public PayloadSetter(TokenStream input):base(input)
 		{
 			InitBlock();
-			payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
+            payloadAtt = AddAttribute<PayloadAttribute>();
 		}
 		
 		internal byte[] data = new byte[1];

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCachingTokenFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCachingTokenFilter.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCachingTokenFilter.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCachingTokenFilter.cs
Sat Nov 12 20:01:55 2011
@@ -46,8 +46,8 @@ namespace Lucene.Net.Analysis
 			private void  InitBlock(TestCachingTokenFilter enclosingInstance)
 			{
 				this.enclosingInstance = enclosingInstance;
-				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
-				offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
+                termAtt = AddAttribute<TermAttribute>();
+                offsetAtt = AddAttribute<OffsetAttribute>();
 			}
 			private TestCachingTokenFilter enclosingInstance;
 			public TestCachingTokenFilter Enclosing_Instance
@@ -101,7 +101,7 @@ namespace Lucene.Net.Analysis
 			writer.AddDocument(doc);
 			writer.Close();
 			
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, true);
 			TermPositions termPositions = reader.TermPositions(new Term("preanalyzed", "term1"));
 			Assert.IsTrue(termPositions.Next());
 			Assert.AreEqual(1, termPositions.Freq());
@@ -127,8 +127,8 @@ namespace Lucene.Net.Analysis
 		private void  checkTokens(TokenStream stream)
 		{
 			int count = 0;
-			
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
 			Assert.IsNotNull(termAtt);
 			while (stream.IncrementToken())
 			{

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCharArraySet.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCharArraySet.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCharArraySet.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestCharArraySet.cs
Sat Nov 12 20:01:55 2011
@@ -63,13 +63,13 @@ namespace Lucene.Net.Analysis
 		{
 			CharArraySet set_Renamed = new CharArraySet(10, true);
 			System.Int32 val = 1;
-			set_Renamed.Add((System.Object) val);
-			Assert.IsTrue(set_Renamed.Contains((System.Object) val));
-			Assert.IsTrue(set_Renamed.Contains((System.Object) 1));
+			set_Renamed.Add(val);
+			Assert.IsTrue(set_Renamed.Contains(val));
+			Assert.IsTrue(set_Renamed.Contains(1));
 			// test unmodifiable
 			set_Renamed = CharArraySet.UnmodifiableSet(set_Renamed);
-			Assert.IsTrue(set_Renamed.Contains((System.Object) val));
-			Assert.IsTrue(set_Renamed.Contains((System.Object) 1));
+			Assert.IsTrue(set_Renamed.Contains(val));
+			Assert.IsTrue(set_Renamed.Contains(1));
 		}
 		
         [Test]

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestISOLatin1AccentFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestISOLatin1AccentFilter.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestISOLatin1AccentFilter.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestISOLatin1AccentFilter.cs
Sat Nov 12 20:01:55 2011
@@ -32,7 +32,7 @@ namespace Lucene.Net.Analysis
 		{
 			TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
 			ISOLatin1AccentFilter filter = new ISOLatin1AccentFilter(stream);
-			TermAttribute termAtt = (TermAttribute) filter.GetAttribute(typeof(TermAttribute));
+            TermAttribute termAtt = filter.GetAttribute<TermAttribute>();
 			AssertTermEquals("Des", filter, termAtt);
 			AssertTermEquals("mot", filter, termAtt);
 			AssertTermEquals("cles", filter, termAtt);

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestKeywordAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestKeywordAnalyzer.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestKeywordAnalyzer.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestKeywordAnalyzer.cs
Sat Nov 12 20:01:55 2011
@@ -31,6 +31,7 @@ using RAMDirectory = Lucene.Net.Store.RA
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
 using ScoreDoc = Lucene.Net.Search.ScoreDoc;
+using Version = Lucene.Net.Util.Version;
 
 namespace Lucene.Net.Analysis
 {
@@ -56,7 +57,7 @@ namespace Lucene.Net.Analysis
 			
 			writer.Close();
 			
-			searcher = new IndexSearcher(directory);
+			searcher = new IndexSearcher(directory, true);
 		}
 		
         [Test]
@@ -65,7 +66,7 @@ namespace Lucene.Net.Analysis
 			PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
 			analyzer.AddAnalyzer("partnum", new KeywordAnalyzer());
 			
-			QueryParser queryParser = new QueryParser("description", analyzer);
+			QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, "description", analyzer);
 			Query query = queryParser.Parse("partnum:Q36 AND SPACE");
 			
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
@@ -86,7 +87,7 @@ namespace Lucene.Net.Analysis
 			writer.AddDocument(doc);
 			writer.Close();
 			
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, true);
 			TermDocs td = reader.TermDocs(new Term("partnum", "Q36"));
 			Assert.IsTrue(td.Next());
 			td = reader.TermDocs(new Term("partnum", "Q37"));
@@ -98,7 +99,7 @@ namespace Lucene.Net.Analysis
 		public virtual void  TestOffsets()
 		{
 			TokenStream stream = new KeywordAnalyzer().TokenStream("field", new System.IO.StringReader("abcd"));
-			OffsetAttribute offsetAtt = (OffsetAttribute) stream.AddAttribute(typeof(OffsetAttribute));
+            OffsetAttribute offsetAtt = stream.AddAttribute<OffsetAttribute>();
 			Assert.IsTrue(stream.IncrementToken());
 			Assert.AreEqual(0, offsetAtt.StartOffset());
 			Assert.AreEqual(4, offsetAtt.EndOffset());

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestLengthFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestLengthFilter.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestLengthFilter.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestLengthFilter.cs
Sat Nov 12 20:01:55 2011
@@ -33,7 +33,7 @@ namespace Lucene.Net.Analysis
 		{
 			TokenStream stream = new WhitespaceTokenizer(new System.IO.StringReader("short toolong evenmuchlongertext a ab toolong foo"));
 			LengthFilter filter = new LengthFilter(stream, 2, 6);
-			TermAttribute termAtt = (TermAttribute) filter.GetAttribute(typeof(TermAttribute));
+            TermAttribute termAtt = filter.GetAttribute<TermAttribute>();
 			
 			Assert.IsTrue(filter.IncrementToken());
 			Assert.AreEqual("short", termAtt.Term());

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestNumericTokenStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestNumericTokenStream.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestNumericTokenStream.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestNumericTokenStream.cs
Sat Nov 12 20:01:55 2011
@@ -38,8 +38,8 @@ namespace Lucene.Net.Analysis
 		{
 			NumericTokenStream stream = new NumericTokenStream().SetLongValue(lvalue);
 			// use getAttribute to test if attributes really exist, if not an IAE will be throwed
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
-			TypeAttribute typeAtt = (TypeAttribute) stream.GetAttribute(typeof(TypeAttribute));
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
+            TypeAttribute typeAtt = stream.GetAttribute<TypeAttribute>();
 			for (int shift = 0; shift < 64; shift += NumericUtils.PRECISION_STEP_DEFAULT)
 			{
 				Assert.IsTrue(stream.IncrementToken(), "New token is available");
@@ -54,8 +54,8 @@ namespace Lucene.Net.Analysis
 		{
 			NumericTokenStream stream = new NumericTokenStream().SetIntValue(ivalue);
 			// use getAttribute to test if attributes really exist, if not an IAE will be throwed
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
-			TypeAttribute typeAtt = (TypeAttribute) stream.GetAttribute(typeof(TypeAttribute));
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
+            TypeAttribute typeAtt = stream.GetAttribute<TypeAttribute>();
 			for (int shift = 0; shift < 32; shift += NumericUtils.PRECISION_STEP_DEFAULT)
 			{
 				Assert.IsTrue(stream.IncrementToken(), "New token is available");

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestPerFieldAnalzyerWrapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestPerFieldAnalzyerWrapper.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestPerFieldAnalzyerWrapper.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestPerFieldAnalzyerWrapper.cs
Sat Nov 12 20:01:55 2011
@@ -35,13 +35,13 @@ namespace Lucene.Net.Analysis
 			analyzer.AddAnalyzer("special", new SimpleAnalyzer());
 			
 			TokenStream tokenStream = analyzer.TokenStream("field", new System.IO.StringReader(text));
-			TermAttribute termAtt = (TermAttribute) tokenStream.GetAttribute(typeof(TermAttribute));
+            TermAttribute termAtt = tokenStream.GetAttribute<TermAttribute>();
 			
 			Assert.IsTrue(tokenStream.IncrementToken());
 			Assert.AreEqual("Qwerty", termAtt.Term(), "WhitespaceAnalyzer does not lowercase");
 			
 			tokenStream = analyzer.TokenStream("special", new System.IO.StringReader(text));
-			termAtt = (TermAttribute) tokenStream.GetAttribute(typeof(TermAttribute));
+            termAtt = tokenStream.GetAttribute<TermAttribute>();
 			Assert.IsTrue(tokenStream.IncrementToken());
 			Assert.AreEqual("qwerty", termAtt.Term(), "SimpleAnalyzer lowercases");
 		}

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStandardAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStandardAnalyzer.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStandardAnalyzer.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStandardAnalyzer.cs
Sat Nov 12 20:01:55 2011
@@ -33,12 +33,12 @@ namespace Lucene.Net.Analysis
 	public class TestStandardAnalyzer:BaseTokenStreamTestCase
 	{
 		
-		private Analyzer a = new StandardAnalyzer();
+		private Analyzer a = new StandardAnalyzer(Version.LUCENE_CURRENT);
 		
         [Test]
 		public virtual void  TestMaxTermLength()
 		{
-			StandardAnalyzer sa = new StandardAnalyzer();
+            StandardAnalyzer sa = new StandardAnalyzer(Version.LUCENE_CURRENT);
 			sa.SetMaxTokenLength(5);
 			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "xy", "z"});
 		}
@@ -46,7 +46,7 @@ namespace Lucene.Net.Analysis
         [Test]
 		public virtual void  TestMaxTermLength2()
 		{
-			StandardAnalyzer sa = new StandardAnalyzer();
+            StandardAnalyzer sa = new StandardAnalyzer(Version.LUCENE_CURRENT);
 			AssertAnalyzesTo(sa, "ab cd toolong xy z", new System.String[]{"ab", "cd", "toolong", "xy", "z"});
 			sa.SetMaxTokenLength(5);
 			
@@ -127,7 +127,7 @@ namespace Lucene.Net.Analysis
 		{
 			try
 			{
-				StandardAnalyzer analyzer = new StandardAnalyzer(true);
+                StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
 				AssertAnalyzesTo(analyzer, "www.nutch.org.", new System.String[]{"www.nutch.org"}, new System.String[]{"<HOST>"});
 			}
 			catch (System.NullReferenceException e)
@@ -139,9 +139,6 @@ namespace Lucene.Net.Analysis
         [Test]
 		public virtual void  TestDomainNames()
 		{
-			// Don't reuse a because we alter its state
-			// (setReplaceInvalidAcronym)
-			
 			// Current lucene should not show the bug
 			StandardAnalyzer a2 = new StandardAnalyzer(Version.LUCENE_CURRENT);
 			// domain names
@@ -264,15 +261,5 @@ namespace Lucene.Net.Analysis
 		{
 			AssertAnalyzesTo(a, "/money.cnn.com/magazines/fortune/fortune_archive/2007/03/19/8402357/index.htm " + "safari-0-sheikh-zayed-grand-mosque.jpg", new System.String[]{"money.cnn.com", "magazines", "fortune", "fortune", "archive/2007/03/19/8402357", "index.htm", "safari-0-sheikh", "zayed", "grand", "mosque.jpg"}, new System.String[]{"<HOST>", "<ALPHANUM>", "<ALPHANUM>", "<ALPHANUM>", "<NUM>", "<HOST>", "<NUM>", "<ALPHANUM>", "<ALPHANUM>", "<HOST>"});
 		}
-		
-		/// <deprecated> this should be removed in the 3.0. 
-		/// </deprecated>
-        [Test]
-		public virtual void  TestDeprecatedAcronyms()
-		{
-			// test backward compatibility for applications that require the old behavior.
-			// this should be removed once replaceDepAcronym is removed.
-			AssertAnalyzesTo(a, "lucene.apache.org.", new System.String[]{"lucene.apache.org"}, new System.String[]{"<HOST>"});
-		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopAnalyzer.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopAnalyzer.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopAnalyzer.cs
Sat Nov 12 20:01:55 2011
@@ -21,6 +21,7 @@ using NUnit.Framework;
 
 using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
 using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
+using Version = Lucene.Net.Util.Version;
 
 namespace Lucene.Net.Analysis
 {
@@ -29,7 +30,7 @@ namespace Lucene.Net.Analysis
 	public class TestStopAnalyzer:BaseTokenStreamTestCase
 	{
 		
-		private StopAnalyzer stop = new StopAnalyzer(false);
+		private StopAnalyzer stop = new StopAnalyzer(Version.LUCENE_CURRENT);
 		private System.Collections.Hashtable inValidTokens = new System.Collections.Hashtable();
 		
 		public TestStopAnalyzer(System.String s):base(s)
@@ -59,7 +60,7 @@ namespace Lucene.Net.Analysis
 			System.IO.StringReader reader = new System.IO.StringReader("This is a test of the english stop analyzer");
 			TokenStream stream = stop.TokenStream("test", reader);
 			Assert.IsTrue(stream != null);
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
 			
 			while (stream.IncrementToken())
 			{
@@ -70,56 +71,47 @@ namespace Lucene.Net.Analysis
         [Test]
 		public virtual void  TestStopList()
 		{
-			System.Collections.Hashtable stopWordsSet = new System.Collections.Hashtable();
-			stopWordsSet.Add("good", "good");
-			stopWordsSet.Add("test", "test");
-			stopWordsSet.Add("analyzer", "analyzer");
-			StopAnalyzer newStop = new StopAnalyzer(stopWordsSet);
+			var stopWordsSet = new System.Collections.Generic.HashSet<string>();
+			stopWordsSet.Add("good");
+			stopWordsSet.Add("test");
+			stopWordsSet.Add("analyzer");
+			StopAnalyzer newStop = new StopAnalyzer(Version.LUCENE_24, stopWordsSet);
 			System.IO.StringReader reader = new System.IO.StringReader("This is a good test of the english stop analyzer");
 			TokenStream stream = newStop.TokenStream("test", reader);
 			Assert.IsNotNull(stream);
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
-			PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stream.AddAttribute(typeof(PositionIncrementAttribute));
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
+            PositionIncrementAttribute posIncrAtt = stream.AddAttribute<PositionIncrementAttribute>();
 			
 			while (stream.IncrementToken())
 			{
 				System.String text = termAtt.Term();
 				Assert.IsFalse(stopWordsSet.Contains(text));
-				Assert.AreEqual(1, posIncrAtt.GetPositionIncrement()); // by default stop tokenizer does not apply increments.
+                Assert.AreEqual(1, posIncrAtt.GetPositionIncrement()); // in 2.4 stop tokenizer does not apply increments.
 			}
 		}
 		
         [Test]
 		public virtual void  TestStopListPositions()
-		{
-			bool defaultEnable = StopFilter.GetEnablePositionIncrementsDefault();
-			StopFilter.SetEnablePositionIncrementsDefault(true);
-			try
-			{
-				System.Collections.Hashtable stopWordsSet = new System.Collections.Hashtable();
-				stopWordsSet.Add("good", "good");
-				stopWordsSet.Add("test", "test");
-				stopWordsSet.Add("analyzer", "analyzer");
-				StopAnalyzer newStop = new StopAnalyzer(stopWordsSet);
-				System.IO.StringReader reader = new System.IO.StringReader("This is a good test of the english stop analyzer with positions");
-				int[] expectedIncr = new int[]{1, 1, 1, 3, 1, 1, 1, 2, 1};
-				TokenStream stream = newStop.TokenStream("test", reader);
-				Assert.IsNotNull(stream);
-				int i = 0;
-				TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
-				PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stream.AddAttribute(typeof(PositionIncrementAttribute));
-				
-				while (stream.IncrementToken())
-				{
-					System.String text = termAtt.Term();
-					Assert.IsFalse(stopWordsSet.Contains(text));
-					Assert.AreEqual(expectedIncr[i++], posIncrAtt.GetPositionIncrement());
-				}
-			}
-			finally
-			{
-				StopFilter.SetEnablePositionIncrementsDefault(defaultEnable);
-			}
-		}
+        {
+            var stopWordsSet = new System.Collections.Generic.HashSet<string>();
+            stopWordsSet.Add("good");
+            stopWordsSet.Add("test");
+            stopWordsSet.Add("analyzed");
+            var newStop = new StopAnalyzer(Version.LUCENE_CURRENT, stopWordsSet);
+            var reader = new System.IO.StringReader("This is a good test of the english stop analyzer with positions");
+            int[] expectedIncr =                   { 1,   1, 1,          3, 1,  1,      1,
           2,   1};
+            TokenStream stream = newStop.TokenStream("test", reader);
+            Assert.NotNull(stream);
+            int i = 0;
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
+            PositionIncrementAttribute posIncrAtt = stream.AddAttribute<PositionIncrementAttribute>();
+
+            while (stream.IncrementToken())
+            {
+                string text = termAtt.Term();
+                Assert.IsFalse(stopWordsSet.Contains(text));
+                Assert.AreEqual(expectedIncr[i++], posIncrAtt.GetPositionIncrement());
+            }
+        }
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopFilter.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopFilter.cs
(original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestStopFilter.cs
Sat Nov 12 20:01:55 2011
@@ -39,9 +39,9 @@ namespace Lucene.Net.Analysis
 		public virtual void  TestExactCase()
 		{
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
-			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
-			TokenStream stream = new StopFilter(false, new WhitespaceTokenizer(reader), stopWords);
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+			var stopWords = new System.Collections.Generic.HashSet<string> {"is", "the", "Time"};
+			TokenStream stream = new StopFilter(false, new WhitespaceTokenizer(reader), stopWords, false);
+            TermAttribute termAtt = (TermAttribute)stream.GetAttribute<TermAttribute>();
 			Assert.IsTrue(stream.IncrementToken());
 			Assert.AreEqual("Now", termAtt.Term());
 			Assert.IsTrue(stream.IncrementToken());
@@ -53,9 +53,9 @@ namespace Lucene.Net.Analysis
 		public virtual void  TestIgnoreCase()
 		{
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
-			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
+            var stopWords = new System.Collections.Generic.HashSet<string> { "is", "the", "Time" };
 			TokenStream stream = new StopFilter(false, new WhitespaceTokenizer(reader), stopWords, true);
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
 			Assert.IsTrue(stream.IncrementToken());
 			Assert.AreEqual("Now", termAtt.Term());
 			Assert.IsFalse(stream.IncrementToken());
@@ -66,9 +66,9 @@ namespace Lucene.Net.Analysis
 		{
 			System.IO.StringReader reader = new System.IO.StringReader("Now is The Time");
 			System.String[] stopWords = new System.String[]{"is", "the", "Time"};
-			System.Collections.Hashtable stopSet = StopFilter.MakeStopSet(stopWords);
+			var stopSet = StopFilter.MakeStopSet(stopWords);
 			TokenStream stream = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet);
-			TermAttribute termAtt = (TermAttribute) stream.GetAttribute(typeof(TermAttribute));
+            TermAttribute termAtt = stream.GetAttribute<TermAttribute>();
 			Assert.IsTrue(stream.IncrementToken());
 			Assert.AreEqual("Now", termAtt.Term());
 			Assert.IsTrue(stream.IncrementToken());
@@ -93,7 +93,7 @@ namespace Lucene.Net.Analysis
 			System.String[] stopWords = (System.String[]) a.ToArray();
 			for (int i = 0; i < a.Count; i++)
 				Log("Stop: " + stopWords[i]);
-			System.Collections.Hashtable stopSet = StopFilter.MakeStopSet(stopWords);
+			var stopSet = StopFilter.MakeStopSet(stopWords);
 			// with increments
 			System.IO.StringReader reader = new System.IO.StringReader(sb.ToString());
 			StopFilter stpf = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet);
@@ -122,8 +122,8 @@ namespace Lucene.Net.Analysis
 			System.String[] stopWords1 = (System.String[]) a1.ToArray();
 			for (int i = 0; i < a1.Count; i++)
 				Log("Stop1: " + stopWords1[i]);
-			System.Collections.Hashtable stopSet0 = StopFilter.MakeStopSet(stopWords0);
-			System.Collections.Hashtable stopSet1 = StopFilter.MakeStopSet(stopWords1);
+			var stopSet0 = StopFilter.MakeStopSet(stopWords0);
+			var stopSet1 = StopFilter.MakeStopSet(stopWords1);
 			reader = new System.IO.StringReader(sb.ToString());
 			StopFilter stpf0 = new StopFilter(false, new WhitespaceTokenizer(reader), stopSet0); // first part of the set
 			stpf0.SetEnablePositionIncrements(true);
@@ -135,8 +135,8 @@ namespace Lucene.Net.Analysis
 		{
 			Log("---> test with enable-increments-" + (enableIcrements?"enabled":"disabled"));
 			stpf.SetEnablePositionIncrements(enableIcrements);
-			TermAttribute termAtt = (TermAttribute) stpf.GetAttribute(typeof(TermAttribute));
-			PositionIncrementAttribute posIncrAtt = (PositionIncrementAttribute) stpf.GetAttribute(typeof(PositionIncrementAttribute));
+            TermAttribute termAtt = stpf.GetAttribute<TermAttribute>();
+            PositionIncrementAttribute posIncrAtt = stpf.GetAttribute<PositionIncrementAttribute>();
 			for (int i = 0; i < 20; i += 3)
 			{
 				Assert.IsTrue(stpf.IncrementToken());

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestToken.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestToken.cs?rev=1201314&r1=1201313&r2=1201314&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestToken.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/test/core/Analysis/TestToken.cs Sat Nov 12 20:01:55 2011
@@ -16,9 +16,12 @@
  */
 
 using System;
-
+using System.IO;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Util;
 using NUnit.Framework;
-
+using Attribute = Lucene.Net.Util.Attribute;
+using FlagsAttribute = Lucene.Net.Analysis.Tokenattributes.FlagsAttribute;
 using Payload = Lucene.Net.Index.Payload;
 using TestSimpleAttributeImpls = Lucene.Net.Analysis.Tokenattributes.TestSimpleAttributeImpls;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
@@ -164,7 +167,7 @@ namespace Lucene.Net.Analysis
 			t.SetTermBuffer(b, 0, 5);
 			Assert.AreEqual("(aloha,0,5)", t.ToString());
 			
-			t.SetTermText("hi there");
+			t.SetTermBuffer("hi there");
 			Assert.AreEqual("(hi there,0,5)", t.ToString());
 		}
 		
@@ -189,20 +192,17 @@ namespace Lucene.Net.Analysis
 		public virtual void  TestMixedStringArray()
 		{
 			Token t = new Token("hello", 0, 5);
-			Assert.AreEqual(t.TermText(), "hello");
 			Assert.AreEqual(t.TermLength(), 5);
 			Assert.AreEqual(t.Term(), "hello");
-			t.SetTermText("hello2");
+			t.SetTermBuffer("hello2");
 			Assert.AreEqual(t.TermLength(), 6);
 			Assert.AreEqual(t.Term(), "hello2");
 			t.SetTermBuffer("hello3".ToCharArray(), 0, 6);
-			Assert.AreEqual(t.TermText(), "hello3");
+			Assert.AreEqual(t.Term(), "hello3");
 			
-			// Make sure if we get the buffer and change a character
-			// that termText() reflects the change
 			char[] buffer = t.TermBuffer();
 			buffer[1] = 'o';
-			Assert.AreEqual(t.TermText(), "hollo3");
+			Assert.AreEqual(t.Term(), "hollo3");
 		}
 		
         [Test]
@@ -245,5 +245,42 @@ namespace Lucene.Net.Analysis
 			Assert.AreEqual(pl, copy.GetPayload());
             Assert.AreNotSame(pl, copy.GetPayload());
 		}
+
+        public interface SenselessAttribute : Attribute {}
+
+        public class SenselessAttributeImpl : AttributeImpl, SenselessAttribute
+        {
+            public override void CopyTo(AttributeImpl target) 
+            { }
+
+            public override void Clear() 
+            { }
+
+            public override bool Equals(object other)
+            {
+                return other is SenselessAttributeImpl;
+            }
+
+            public override int GetHashCode()
+            {
+                return 0;
+            }
+        }
+
+        [Test]
+        public void TestTokenAttributeFactory()
+        {
+            TokenStream ts = new WhitespaceTokenizer(Token.TOKEN_ATTRIBUTE_FACTORY, new StringReader("foo, bar"));
+
+            Assert.IsTrue(ts.AddAttribute<SenselessAttribute>() is SenselessAttributeImpl,
+                          "TypeAttribute is not implemented by SenselessAttributeImpl");
+
+            Assert.IsTrue(ts.AddAttribute<TermAttribute>() is Token, "TermAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<OffsetAttribute>() is Token, "OffsetAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<FlagsAttribute>() is Token, "FlagsAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<PayloadAttribute>() is Token, "PayloadAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<PositionIncrementAttribute>() is Token, "PositionIncrementAttribute is not implemented by Token");
+            Assert.IsTrue(ts.AddAttribute<TypeAttribute>() is Token, "TypeAttribute is not implemented by Token");
+        }
 	}
 }
\ No newline at end of file



Mime
View raw message