lucenenet-commits mailing list archives

From: ccurr...@apache.org
Subject: [Lucene.Net] svn commit: r1294875 [39/45] - in /incubator/lucene.net/trunk: ./ build/ build/vs2010/contrib/ build/vs2010/test/ doc/ src/ src/contrib/Analyzers/ src/contrib/Analyzers/AR/ src/contrib/Analyzers/BR/ src/contrib/Analyzers/CJK/ src/contrib/Analyzers/Cn/ ...
Date: Tue, 28 Feb 2012 22:43:28 GMT
Modified: incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestParallelReader.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
@@ -77,14 +77,14 @@ namespace Lucene.Net.Index
 			Directory dir1 = GetDir1();
 			Directory dir2 = GetDir2();
 			ParallelReader pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+            pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
             System.Collections.Generic.ICollection<string> fieldNames = pr.GetFieldNames(IndexReader.FieldOption.ALL);
 			Assert.AreEqual(4, fieldNames.Count);
-			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f1"));
-			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f2"));
-			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f3"));
-			Assert.IsTrue(SupportClass.CollectionsHelper.Contains(fieldNames, "f4"));
+			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "f1"));
+			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "f2"));
+			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "f3"));
+			Assert.IsTrue(CollectionsHelper.Contains(fieldNames, "f4"));
 		}
 		
 		[Test]
@@ -93,11 +93,11 @@ namespace Lucene.Net.Index
 			Directory dir1 = GetDir1();
 			Directory dir2 = GetDir2();
 			ParallelReader pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+            pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
 			
 			Document doc11 = pr.Document(0, new MapFieldSelector(new System.String[]{"f1"}));
-			Document doc24 = pr.Document(1, new MapFieldSelector(new System.Collections.ArrayList(new System.String[]{"f4"})));
+			Document doc24 = pr.Document(1, new MapFieldSelector(new System.String[]{"f4"}));
 			Document doc223 = pr.Document(1, new MapFieldSelector(new System.String[]{"f2", "f3"}));
 			
 			Assert.AreEqual(1, doc11.GetFields().Count);
@@ -118,17 +118,17 @@ namespace Lucene.Net.Index
 			
 			// one document only:
 			Directory dir2 = new MockRAMDirectory();
-			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document d3 = new Document();
 			d3.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			w2.AddDocument(d3);
 			w2.Close();
 			
 			ParallelReader pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
+            pr.Add(IndexReader.Open(dir1, false));
 			try
 			{
-				pr.Add(IndexReader.Open(dir2));
+                pr.Add(IndexReader.Open(dir2, false));
 				Assert.Fail("didn't get exptected exception: indexes don't have same number of documents");
 			}
 			catch (System.ArgumentException e)
@@ -143,19 +143,19 @@ namespace Lucene.Net.Index
 			Directory dir1 = GetDir1();
 			Directory dir2 = GetDir2();
 			ParallelReader pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+			pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
 			
 			Assert.IsTrue(pr.IsCurrent());
-			IndexReader modifier = IndexReader.Open(dir1);
+            IndexReader modifier = IndexReader.Open(dir1, false);
 			modifier.SetNorm(0, "f1", 100);
 			modifier.Close();
 			
 			// one of the two IndexReaders which ParallelReader is using
 			// is not current anymore
 			Assert.IsFalse(pr.IsCurrent());
-			
-			modifier = IndexReader.Open(dir2);
+
+            modifier = IndexReader.Open(dir2, false);
 			modifier.SetNorm(0, "f3", 100);
 			modifier.Close();
 			
@@ -170,13 +170,13 @@ namespace Lucene.Net.Index
 			Directory dir2 = GetDir2();
 			
 			// add another document to ensure that the indexes are not optimized
-			IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
 			Document d = new Document();
 			d.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			modifier.AddDocument(d);
 			modifier.Close();
 			
-			modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			modifier = new IndexWriter(dir2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
 			d = new Document();
 			d.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			modifier.AddDocument(d);
@@ -184,30 +184,30 @@ namespace Lucene.Net.Index
 			
 			
 			ParallelReader pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+            pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
 			Assert.IsFalse(pr.IsOptimized());
 			pr.Close();
 			
-			modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			modifier = new IndexWriter(dir1, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
 			modifier.Optimize();
 			modifier.Close();
 			
 			pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+            pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
 			// just one of the two indexes are optimized
 			Assert.IsFalse(pr.IsOptimized());
 			pr.Close();
 			
 			
-			modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+			modifier = new IndexWriter(dir2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
 			modifier.Optimize();
 			modifier.Close();
 			
 			pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+            pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
 			// now both indexes are optimized
 			Assert.IsTrue(pr.IsOptimized());
 			pr.Close();
@@ -219,8 +219,8 @@ namespace Lucene.Net.Index
 			Directory dir1 = GetDir1();
 			Directory dir2 = GetDir2();
 			ParallelReader pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+            pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
 			int NUM_DOCS = 2;
 			TermDocs td = pr.TermDocs(null);
 			for (int i = 0; i < NUM_DOCS; i++)
@@ -257,7 +257,7 @@ namespace Lucene.Net.Index
 		private Searcher Single()
 		{
 			Directory dir = new MockRAMDirectory();
-			IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document d1 = new Document();
 			d1.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d1.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -271,8 +271,8 @@ namespace Lucene.Net.Index
 			d2.Add(new Field("f4", "v2", Field.Store.YES, Field.Index.ANALYZED));
 			w.AddDocument(d2);
 			w.Close();
-			
-			return new IndexSearcher(dir);
+
+            return new IndexSearcher(dir, false);
 		}
 		
 		// Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader:
@@ -281,15 +281,15 @@ namespace Lucene.Net.Index
 			Directory dir1 = GetDir1();
 			Directory dir2 = GetDir2();
 			ParallelReader pr = new ParallelReader();
-			pr.Add(IndexReader.Open(dir1));
-			pr.Add(IndexReader.Open(dir2));
+            pr.Add(IndexReader.Open(dir1, false));
+            pr.Add(IndexReader.Open(dir2, false));
 			return new IndexSearcher(pr);
 		}
 		
 		private Directory GetDir1()
 		{
 			Directory dir1 = new MockRAMDirectory();
-			IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document d1 = new Document();
 			d1.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d1.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -305,7 +305,7 @@ namespace Lucene.Net.Index
 		private Directory GetDir2()
 		{
 			Directory dir2 = new RAMDirectory();
-			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			Document d3 = new Document();
 			d3.Add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
 			d3.Add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));

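A minimal sketch, not part of the committed patch, of the reader-opening pattern this test now follows: the readOnly flag is passed to IndexReader.Open explicitly instead of relying on the older single-argument overload. The class and method names below are illustrative only.

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class ReadOnlyReaderSketch
    {
        public static void Run()
        {
            Directory dir = new RAMDirectory();

            // Build a one-document index so the readers below have something to open.
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
                                                 IndexWriter.MaxFieldLength.LIMITED);
            Document doc = new Document();
            doc.Add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Close();

            // readOnly = false: the reader may modify the index (SetNorm, DeleteDocument).
            IndexReader writable = IndexReader.Open(dir, false);
            // readOnly = true: a cheaper reader that never takes the write lock.
            IndexReader readOnly = IndexReader.Open(dir, true);

            writable.Close();
            readOnly.Close();
        }
    }
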
Modified: incubator/lucene.net/trunk/test/core/Index/TestParallelTermEnum.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestParallelTermEnum.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestParallelTermEnum.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestParallelTermEnum.cs Tue Feb 28 22:43:08 2012
@@ -63,8 +63,8 @@ namespace Lucene.Net.Index
 			
 			iw2.Close();
 			
-			this.ir1 = IndexReader.Open(rd1);
-			this.ir2 = IndexReader.Open(rd2);
+			this.ir1 = IndexReader.Open(rd1, true);
+		    this.ir2 = IndexReader.Open(rd2, true);
 		}
 		
 		[TearDown]

Modified: incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestPayloads.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -42,7 +42,7 @@ namespace Lucene.Net.Index
     [TestFixture]
 	public class TestPayloads:LuceneTestCase
 	{
-		private class AnonymousClassThread:SupportClass.ThreadClass
+		private class AnonymousClassThread:ThreadClass
 		{
 			public AnonymousClassThread(int numDocs, System.String field, Lucene.Net.Index.TestPayloads.ByteArrayPool pool, Lucene.Net.Index.IndexWriter writer, TestPayloads enclosingInstance)
 			{
@@ -214,7 +214,7 @@ namespace Lucene.Net.Index
 			PerformTest(dir);
 			
 			// now use a FSDirectory and repeat same test
-			System.IO.FileInfo dirName = _TestUtil.GetTempDir("test_payloads");
+			System.IO.DirectoryInfo dirName = _TestUtil.GetTempDir("test_payloads");
 			dir = FSDirectory.Open(dirName);
 			PerformTest(dir);
 			_TestUtil.RmDir(dirName);
@@ -261,7 +261,7 @@ namespace Lucene.Net.Index
 			}
 			
 			// make sure we create more than one segment to test merging
-			writer.Flush();
+			writer.Commit();
 			
 			// now we make sure to have different payload lengths next at the next skip point        
 			for (int i = 0; i < numDocs; i++)
@@ -280,7 +280,7 @@ namespace Lucene.Net.Index
 			* Verify the index
 			* first we test if all payloads are stored correctly
 			*/
-			IndexReader reader = IndexReader.Open(dir);
+		    IndexReader reader = IndexReader.Open(dir, true);
 			
 			byte[] verifyPayloadData = new byte[payloadDataLength];
 			offset = 0;
@@ -388,8 +388,8 @@ namespace Lucene.Net.Index
 			writer.Optimize();
 			// flush
 			writer.Close();
-			
-			reader = IndexReader.Open(dir);
+
+		    reader = IndexReader.Open(dir, true);
 			tp = reader.TermPositions(new Term(fieldName, singleTerm));
 			tp.Next();
 			tp.NextPosition();
@@ -520,7 +520,7 @@ namespace Lucene.Net.Index
 				this.data = data;
 				this.length = length;
 				this.offset = offset;
-				payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
+				payloadAtt =  AddAttribute<PayloadAttribute>();
 			}
 			
 			public override bool IncrementToken()
@@ -561,7 +561,7 @@ namespace Lucene.Net.Index
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 			System.String field = "test";
 			
-			SupportClass.ThreadClass[] ingesters = new SupportClass.ThreadClass[numThreads];
+			ThreadClass[] ingesters = new ThreadClass[numThreads];
 			for (int i = 0; i < numThreads; i++)
 			{
 				ingesters[i] = new AnonymousClassThread(numDocs, field, pool, writer, this);
@@ -573,7 +573,7 @@ namespace Lucene.Net.Index
 				ingesters[i].Join();
 			}
 			writer.Close();
-			IndexReader reader = IndexReader.Open(dir);
+		    IndexReader reader = IndexReader.Open(dir, true);
 			TermEnum terms = reader.Terms();
 			while (terms.Next())
 			{
@@ -626,8 +626,8 @@ namespace Lucene.Net.Index
 				Enclosing_Instance.GenerateRandomData(payload);
 				term = pool.BytesToString(payload);
 				first = true;
-				payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
-				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+				payloadAtt =  AddAttribute<PayloadAttribute>();
+				termAtt =  AddAttribute<TermAttribute>();
 			}
 			
 			public override bool IncrementToken()
@@ -640,11 +640,11 @@ namespace Lucene.Net.Index
 				payloadAtt.SetPayload(new Payload(payload));
 				return true;
 			}
-			
-			public override void  Close()
-			{
-				pool.Release(payload);
-			}
+
+            protected override void Dispose(bool disposing)
+            {
+                pool.Release(payload);
+            }
 		}
 		
 		internal class ByteArrayPool

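A self-contained sketch, again not part of the patch, of the two TokenStream changes visible above: attributes come from the generic AddAttribute<T>() overload, and cleanup moves from an overridden Close() to Dispose(bool). The stream emits a single token carrying a fixed payload; all names are illustrative.

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;
    using Lucene.Net.Index;

    internal class SinglePayloadTokenStream : TokenStream
    {
        private readonly TermAttribute termAtt;
        private readonly PayloadAttribute payloadAtt;
        private bool exhausted;

        public SinglePayloadTokenStream()
        {
            // Generic overload replaces AddAttribute(typeof(...)) plus a cast.
            termAtt = AddAttribute<TermAttribute>();
            payloadAtt = AddAttribute<PayloadAttribute>();
        }

        public override bool IncrementToken()
        {
            if (exhausted)
                return false;
            exhausted = true;
            termAtt.SetTermBuffer("token");
            payloadAtt.SetPayload(new Payload(new byte[] { 1, 2, 3 }));
            return true;
        }

        // Replaces the old Close() override; runs when the stream is disposed.
        protected override void Dispose(bool disposing)
        {
            // Nothing to release in this sketch.
        }
    }
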
Modified: incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestPositionBasedTermVectorMapper.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
@@ -81,20 +81,20 @@ namespace Lucene.Net.Index
 				System.String token = tokens[i];
 				mapper.Map(token, 1, null, thePositions[i]);
 			}
-			System.Collections.IDictionary map = mapper.GetFieldToTerms();
+			var map = mapper.GetFieldToTerms();
 			Assert.IsTrue(map != null, "map is null and it shouldn't be");
 			Assert.IsTrue(map.Count == 1, "map Size: " + map.Count + " is not: " + 1);
-			System.Collections.IDictionary positions = (System.Collections.IDictionary) map["test"];
-			Assert.IsTrue(positions != null, "thePositions is null and it shouldn't be");
-			
-			Assert.IsTrue(positions.Count == numPositions, "thePositions Size: " + positions.Count + " is not: " + numPositions);
+			var positions = map["test"];
+			Assert.IsNotNull(positions, "thePositions is null and it shouldn't be");
+
+            Assert.AreEqual(numPositions, positions.Count, "thePositions Size: " + positions.Count + " is not: " + numPositions);
 			System.Collections.BitArray bits = new System.Collections.BitArray((numPositions % 64 == 0?numPositions / 64:numPositions / 64 + 1) * 64);
-			for (System.Collections.IEnumerator iterator = positions.GetEnumerator(); iterator.MoveNext(); )
+			for (var iterator = positions.GetEnumerator(); iterator.MoveNext(); )
 			{
-				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
-				PositionBasedTermVectorMapper.TVPositionInfo info = (PositionBasedTermVectorMapper.TVPositionInfo) entry.Value;
+				var entry = iterator.Current;
+				PositionBasedTermVectorMapper.TVPositionInfo info = entry.Value;
 				Assert.IsTrue(info != null, "info is null and it shouldn't be");
-				int pos = ((System.Int32) entry.Key);
+				int pos = (int)entry.Key;
 				bits.Set(pos, true);
 				Assert.IsTrue(info.Position == pos, info.Position + " does not equal: " + pos);
 				Assert.IsTrue(info.Offsets != null, "info.getOffsets() is null and it shouldn't be");
@@ -109,7 +109,7 @@ namespace Lucene.Net.Index
 					Assert.IsTrue(info.Offsets.Count == 1, "info.getOffsets() Size: " + info.Offsets.Count + " is not: " + 1);
 				}
 			}
-			Assert.IsTrue(SupportClass.BitSetSupport.Cardinality(bits) == numPositions, "Bits are not all on");
+			Assert.IsTrue(BitSetSupport.Cardinality(bits) == numPositions, "Bits are not all on");
 		}
     }
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentMerger.cs Tue Feb 28 22:43:08 2012
@@ -59,8 +59,8 @@ namespace Lucene.Net.Index
 			SegmentInfo info1 = DocHelper.WriteDoc(merge1Dir, doc1);
 			DocHelper.SetupDoc(doc2);
 			SegmentInfo info2 = DocHelper.WriteDoc(merge2Dir, doc2);
-			reader1 = SegmentReader.Get(info1);
-			reader2 = SegmentReader.Get(info2);
+			reader1 = SegmentReader.Get(true, info1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+            reader2 = SegmentReader.Get(true, info2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 		}
 
         [TearDown]
@@ -93,7 +93,7 @@ namespace Lucene.Net.Index
 			merger.CloseReaders();
 			Assert.IsTrue(docsMerged == 2);
 			//Should be able to open a new SegmentReader against the new directory
-			SegmentReader mergedReader = SegmentReader.Get(new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true));
+            SegmentReader mergedReader = SegmentReader.Get(true, new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			Assert.IsTrue(mergedReader != null);
 			Assert.IsTrue(mergedReader.NumDocs() == 2);
 			Document newDoc1 = mergedReader.Document(0);
@@ -111,7 +111,7 @@ namespace Lucene.Net.Index
 			System.Collections.Generic.ICollection<string> stored = mergedReader.GetFieldNames(IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR);
 			Assert.IsTrue(stored != null);
 			//System.out.println("stored size: " + stored.size());
-			Assert.IsTrue(stored.Count == 4, "We do not have 4 fields that were indexed with term vector");
+			Assert.IsTrue(stored.Count == 3, "We do not have 3 fields that were indexed with term vector");
 			
 			TermFreqVector vector = mergedReader.GetTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY);
 			Assert.IsTrue(vector != null);

Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentReader.cs Tue Feb 28 22:43:08 2012
@@ -50,7 +50,7 @@ namespace Lucene.Net.Index
 			base.SetUp();
 			DocHelper.SetupDoc(testDoc);
 			SegmentInfo info = DocHelper.WriteDoc(dir, testDoc);
-			reader = SegmentReader.Get(info);
+            reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 		}
 
         [TearDown]
@@ -79,10 +79,9 @@ namespace Lucene.Net.Index
 			//There are 2 unstored fields on the document that are not preserved across writing
 			Assert.IsTrue(DocHelper.NumFields(result) == DocHelper.NumFields(testDoc) - DocHelper.unstored.Count);
 			
-			System.Collections.IList fields = result.GetFields();
-			for (System.Collections.IEnumerator iter = fields.GetEnumerator(); iter.MoveNext(); )
+			var fields = result.GetFields();
+            foreach (var field in fields)
 			{
-				Fieldable field = (Fieldable) iter.Current;
 				Assert.IsTrue(field != null);
 				Assert.IsTrue(DocHelper.nameValues.Contains(field.Name()));
 			}
@@ -94,7 +93,7 @@ namespace Lucene.Net.Index
 			Document docToDelete = new Document();
 			DocHelper.SetupDoc(docToDelete);
 			SegmentInfo info = DocHelper.WriteDoc(dir, docToDelete);
-			SegmentReader deleteReader = SegmentReader.Get(info);
+            SegmentReader deleteReader = SegmentReader.Get(false, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			Assert.IsTrue(deleteReader != null);
 			Assert.IsTrue(deleteReader.NumDocs() == 1);
 			deleteReader.DeleteDocument(0);
@@ -199,16 +198,7 @@ namespace Lucene.Net.Index
 						// test for fake norms of 1.0 or null depending on the flag
 						byte[] norms = reader.Norms(f.Name());
 						byte norm1 = DefaultSimilarity.EncodeNorm(1.0f);
-						if (reader.GetDisableFakeNorms())
-							Assert.IsNull(norms);
-						else
-						{
-							Assert.AreEqual(norms.Length, reader.MaxDoc());
-							for (int j = 0; j < reader.MaxDoc(); j++)
-							{
-								Assert.AreEqual(norms[j], norm1);
-							}
-						}
+						Assert.IsNull(norms);
 						norms = new byte[reader.MaxDoc()];
 						reader.Norms(f.Name(), norms, 0);
 						for (int j = 0; j < reader.MaxDoc(); j++)
@@ -238,7 +228,7 @@ namespace Lucene.Net.Index
 			
 			TermFreqVector[] results = reader.GetTermFreqVectors(0);
 			Assert.IsTrue(results != null);
-			Assert.IsTrue(results.Length == 4, "We do not have 4 term freq vectors, we have: " + results.Length);
+			Assert.IsTrue(results.Length == 3, "We do not have 3 term freq vectors, we have: " + results.Length);
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/test/core/Index/TestSegmentTermEnum.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSegmentTermEnum.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSegmentTermEnum.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSegmentTermEnum.cs Tue Feb 28 22:43:08 2012
@@ -86,7 +86,7 @@ namespace Lucene.Net.Index
 		
 		private void  VerifyDocFreq()
 		{
-			IndexReader reader = IndexReader.Open(dir);
+		    IndexReader reader = IndexReader.Open(dir, true);
 			TermEnum termEnum = null;
 			
 			// create enumeration of all terms

Modified: incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestSnapshotDeletionPolicy.cs Tue Feb 28 22:43:08 2012
@@ -19,7 +19,7 @@
 // that we do not require any package private access.
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
@@ -48,7 +48,7 @@ namespace Lucene.Net.Index
     [TestFixture]
 	public class TestSnapshotDeletionPolicy:LuceneTestCase
 	{
-		private class AnonymousClassThread:SupportClass.ThreadClass
+		private class AnonymousClassThread:ThreadClass
 		{
 			public AnonymousClassThread(long stopTime, Lucene.Net.Index.IndexWriter writer, TestSnapshotDeletionPolicy enclosingInstance)
 			{
@@ -75,30 +75,40 @@ namespace Lucene.Net.Index
 			{
 				Document doc = new Document();
 				doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-				while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < stopTime)
-				{
-					for (int i = 0; i < 27; i++)
-					{
-						try
-						{
-							writer.AddDocument(doc);
-						}
-						catch (System.Exception t)
-						{
-							System.Console.Out.WriteLine(t.StackTrace);
-							Assert.Fail("addDocument failed");
-						}
-					}
-					try
-					{
-						System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 1));
-					}
-					catch (System.Threading.ThreadInterruptedException ie)
-					{
-						SupportClass.ThreadClass.Current().Interrupt();
-						throw new System.SystemException("", ie);
-					}
-				}
+			    do
+			    {
+			        for (int i = 0; i < 27; i++)
+			        {
+			            try
+			            {
+			                writer.AddDocument(doc);
+			            }
+			            catch (System.Exception t)
+			            {
+			                System.Console.Out.WriteLine(t.StackTrace);
+			                Assert.Fail("addDocument failed");
+			            }
+			            if (i%2 == 0)
+			            {
+			                try
+			                {
+			                    writer.Commit();
+			                }
+			                catch (Exception e)
+			                {
+			                    throw new SystemException("", e);
+			                }
+			            }
+			        }
+			        try
+			        {
+			            System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000*1));
+			        }
+			        catch (System.Threading.ThreadInterruptedException ie)
+			        {
+			            throw;
+			        }
+			    } while ((DateTime.Now.Ticks/TimeSpan.TicksPerMillisecond) < stopTime);
 			}
 		}
 		public const System.String INDEX_PATH = "test.snapshots";
@@ -106,11 +116,9 @@ namespace Lucene.Net.Index
         [Test]
 		public virtual void  TestSnapshotDeletionPolicy_Renamed()
 		{
-			System.IO.FileInfo dir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), INDEX_PATH));
+			System.IO.DirectoryInfo dir = _TestUtil.GetTempDir(INDEX_PATH);
 			try
 			{
-				// Sometimes past test leaves the dir
-				_TestUtil.RmDir(dir);
 				Directory fsDir = FSDirectory.Open(dir);
 				RunTest(fsDir);
 				fsDir.Close();
@@ -131,27 +139,39 @@ namespace Lucene.Net.Index
 			Directory dir = new MockRAMDirectory();
 			
 			SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-			IndexWriter writer = new IndexWriter(dir, true, new StandardAnalyzer(), dp);
-			// Force frequent commits
+            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+			// Force frequent flushes
 			writer.SetMaxBufferedDocs(2);
 			Document doc = new Document();
 			doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-			for (int i = 0; i < 7; i++)
-				writer.AddDocument(doc);
-			IndexCommit cp = (IndexCommit) dp.Snapshot();
+            for (int i = 0; i < 7; i++)
+            {
+                writer.AddDocument(doc);
+                if (i % 2 == 0)
+                {
+                    writer.Commit();
+                }
+            }
+            IndexCommit cp =  dp.Snapshot();
 			CopyFiles(dir, cp);
 			writer.Close();
 			CopyFiles(dir, cp);
-			
-			writer = new IndexWriter(dir, true, new StandardAnalyzer(), dp);
+
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
 			CopyFiles(dir, cp);
-			for (int i = 0; i < 7; i++)
-				writer.AddDocument(doc);
+            for (int i = 0; i < 7; i++)
+            {
+                writer.AddDocument(doc);
+                if (i % 2 == 0)
+                {
+                    writer.Commit();
+                }
+            }
 			CopyFiles(dir, cp);
 			writer.Close();
 			CopyFiles(dir, cp);
 			dp.Release();
-			writer = new IndexWriter(dir, true, new StandardAnalyzer(), dp);
+            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.Close();
 			try
 			{
@@ -171,24 +191,24 @@ namespace Lucene.Net.Index
 			long stopTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 7000;
 			
 			SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-			IndexWriter writer = new IndexWriter(dir, true, new StandardAnalyzer(), dp);
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
 			
-			// Force frequent commits
+			// Force frequent flushes
 			writer.SetMaxBufferedDocs(2);
 			
-			SupportClass.ThreadClass t = new AnonymousClassThread(stopTime, writer, this);
+			ThreadClass t = new AnonymousClassThread(stopTime, writer, this);
 			
 			t.Start();
 			
 			// While the above indexing thread is running, take many
 			// backups:
-			while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < stopTime)
-			{
-				BackupIndex(dir, dp);
-				System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * 20));
-				if (!t.IsAlive)
-					break;
-			}
+		    do
+		    {
+		        BackupIndex(dir, dp);
+		        System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000*20));
+		        if (!t.IsAlive)
+		            break;
+		    } while ((DateTime.Now.Ticks/TimeSpan.TicksPerMillisecond) < stopTime);
 			
 			t.Join();
 			
@@ -234,10 +254,8 @@ namespace Lucene.Net.Index
 			// we take to do the backup, the IndexWriter will
 			// never delete the files in the snapshot:
 			System.Collections.Generic.ICollection<string> files = cp.GetFileNames();
-			System.Collections.IEnumerator it = files.GetEnumerator();
-			while (it.MoveNext())
+            foreach (string fileName in files)
 			{
-				System.String fileName = (System.String) it.Current;
 				// NOTE: in a real backup you would not use
 				// readFile; you would need to use something else
 				// that copies the file to a backup location.  This

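A brief sketch, under the same assumptions as the test above (a Directory that already contains an index), of the snapshot/backup pattern being exercised: hold a snapshot while copying the files it names, then release it so the writer may delete obsolete files again. The helper name and the copy step are placeholders.

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class SnapshotBackupSketch
    {
        public static void Backup(Directory dir)
        {
            var dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
            var writer = new IndexWriter(dir,
                                         new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT),
                                         dp, IndexWriter.MaxFieldLength.UNLIMITED);
            try
            {
                // While the snapshot is held, the writer will not delete any file
                // belonging to that commit point.
                IndexCommit cp = dp.Snapshot();
                try
                {
                    foreach (string fileName in cp.GetFileNames())
                    {
                        // Copy fileName out of dir to the backup location here.
                    }
                }
                finally
                {
                    dp.Release();
                }
            }
            finally
            {
                writer.Close();
            }
        }
    }
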
Modified: incubator/lucene.net/trunk/test/core/Index/TestStressIndexing.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestStressIndexing.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestStressIndexing.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestStressIndexing.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using Lucene.Net.Analysis;
@@ -38,7 +38,7 @@ namespace Lucene.Net.Index
 		private static readonly Analyzer ANALYZER = new SimpleAnalyzer();
 		private System.Random RANDOM;
 		
-		abstract public class TimedThread:SupportClass.ThreadClass
+		abstract public class TimedThread:ThreadClass
 		{
 			internal bool failed;
 			internal int count;
@@ -68,7 +68,7 @@ namespace Lucene.Net.Index
 				}
 				catch (System.Exception e)
 				{
-					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current() + ": exc");
+					System.Console.Out.WriteLine(ThreadClass.Current() + ": exc");
 					System.Console.Out.WriteLine(e.StackTrace);
 					failed = true;
 				}
@@ -151,9 +151,9 @@ namespace Lucene.Net.Index
 		Run one indexer and 2 searchers against single index as
 		stress test.
 		*/
-		public virtual void  RunStressTest(Directory directory, bool autoCommit, MergeScheduler mergeScheduler)
+		public virtual void  RunStressTest(Directory directory, MergeScheduler mergeScheduler)
 		{
-			IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
+		    IndexWriter modifier = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 			
 			modifier.SetMaxBufferedDocs(10);
 			
@@ -204,36 +204,16 @@ namespace Lucene.Net.Index
 		public virtual void  TestStressIndexAndSearching()
 		{
 			RANDOM = NewRandom();
-			
-			// RAMDir
-			Directory directory = new MockRAMDirectory();
-			RunStressTest(directory, true, null);
-			directory.Close();
-			
-			// FSDir
-			System.IO.FileInfo dirPath = _TestUtil.GetTempDir("lucene.test.stress");
-			directory = FSDirectory.Open(dirPath);
-			RunStressTest(directory, true, null);
-			directory.Close();
-			
+
 			// With ConcurrentMergeScheduler, in RAMDir
-			directory = new MockRAMDirectory();
-			RunStressTest(directory, true, new ConcurrentMergeScheduler());
+			Directory directory = new MockRAMDirectory();
+			RunStressTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			
 			// With ConcurrentMergeScheduler, in FSDir
+		    var dirPath = _TestUtil.GetTempDir("lucene.test.stress");
 			directory = FSDirectory.Open(dirPath);
-			RunStressTest(directory, true, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler and autoCommit=false, in RAMDir
-			directory = new MockRAMDirectory();
-			RunStressTest(directory, false, new ConcurrentMergeScheduler());
-			directory.Close();
-			
-			// With ConcurrentMergeScheduler and autoCommit=false, in FSDir
-			directory = FSDirectory.Open(dirPath);
-			RunStressTest(directory, false, new ConcurrentMergeScheduler());
+			RunStressTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			
 			_TestUtil.RmDir(dirPath);

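With the autoCommit flag gone from the IndexWriter constructors, a writer built as above makes its changes visible to new readers only after an explicit Commit() or Close(). A minimal sketch of that flow, with illustrative names and a plain RAMDirectory standing in for the test's MockRAMDirectory:

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    public static class ExplicitCommitSketch
    {
        public static void Run()
        {
            Directory dir = new RAMDirectory();

            // The constructor takes an explicit MaxFieldLength; there is no autoCommit argument.
            IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true,
                                                 IndexWriter.MaxFieldLength.UNLIMITED);

            Document doc = new Document();
            doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);

            // Nothing is visible to a new reader until the writer commits (or closes).
            writer.Commit();

            IndexReader reader = IndexReader.Open(dir, true);
            // reader.NumDocs() == 1 at this point.
            reader.Close();
            writer.Close();
        }
    }
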
Modified: incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestStressIndexing2.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,9 @@
  */
 
 using System;
-
+using System.Collections.Generic;
+using System.Linq;
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using Lucene.Net.Analysis;
@@ -43,7 +45,6 @@ namespace Lucene.Net.Index
 		internal static int maxFields = 4;
 		internal static int bigFieldSize = 10;
 		internal static bool sameFieldOrder = false;
-		internal static bool autoCommit = false;
 		internal static int mergeFactor = 3;
 		internal static int maxBufferedDocs = 3;
 		new internal static int seed = 0;
@@ -66,7 +67,7 @@ namespace Lucene.Net.Index
 				
 			}
 			
-			public MockIndexWriter(TestStressIndexing2 enclosingInstance, Directory dir, bool autoCommit, Analyzer a, bool create):base(dir, autoCommit, a, create)
+			public MockIndexWriter(TestStressIndexing2 enclosingInstance, Directory dir, Analyzer a, bool create, IndexWriter.MaxFieldLength mfl):base(dir, a, create, mfl)
 			{
 				InitBlock(enclosingInstance);
 			}
@@ -127,7 +128,6 @@ namespace Lucene.Net.Index
 			{
 				// increase iterations for better testing
 				sameFieldOrder = r.NextDouble() > 0.5;
-				autoCommit = r.NextDouble() > 0.5;
 				mergeFactor = r.Next(3) + 2;
 				maxBufferedDocs = r.Next(3) + 2;
 				seed++;
@@ -161,7 +161,7 @@ namespace Lucene.Net.Index
 		public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir)
 		{
 			System.Collections.Hashtable docs = new System.Collections.Hashtable();
-			IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
+			IndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 			w.SetUseCompoundFile(false);
 			
 			/***
@@ -204,7 +204,7 @@ namespace Lucene.Net.Index
 				IndexingThread th = threads[i];
 				lock (th)
 				{
-					SupportClass.CollectionsHelper.AddAllIfNotContains(docs, th.docs);
+					CollectionsHelper.AddAllIfNotContains(docs, th.docs);
 				}
 			}
 			
@@ -220,7 +220,7 @@ namespace Lucene.Net.Index
 			System.Collections.IDictionary docs = new System.Collections.Hashtable();
 			for (int iter = 0; iter < 3; iter++)
 			{
-				IndexWriter w = new MockIndexWriter(this, dir, autoCommit, new WhitespaceAnalyzer(), true);
+				IndexWriter w = new MockIndexWriter(this, dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
 				w.SetUseCompoundFile(false);
 				
 				// force many merges
@@ -280,7 +280,7 @@ namespace Lucene.Net.Index
 			while (iter.MoveNext())
 			{
 				Document d = (Document) iter.Current;
-				System.Collections.ArrayList fields = new System.Collections.ArrayList();
+                var fields = new List<Fieldable>();
 				fields.AddRange(d.GetFields());
 				// put fields in same order each time
                 //{{Lucene.Net-2.9.1}} No, don't change the order of the fields
@@ -301,15 +301,15 @@ namespace Lucene.Net.Index
 		
 		public static void  VerifyEquals(IndexReader r1, Directory dir2, System.String idField)
 		{
-			IndexReader r2 = IndexReader.Open(dir2);
+		    IndexReader r2 = IndexReader.Open(dir2, true);
 			VerifyEquals(r1, r2, idField);
 			r2.Close();
 		}
 		
 		public static void  VerifyEquals(Directory dir1, Directory dir2, System.String idField)
 		{
-			IndexReader r1 = IndexReader.Open(dir1);
-			IndexReader r2 = IndexReader.Open(dir2);
+			IndexReader r1 = IndexReader.Open(dir1, true);
+		    IndexReader r2 = IndexReader.Open(dir2, true);
 			VerifyEquals(r1, r2, idField);
 			r1.Close();
 			r2.Close();
@@ -479,16 +479,16 @@ namespace Lucene.Net.Index
 		
 		public static void  VerifyEquals(Document d1, Document d2)
 		{
-			System.Collections.IList ff1 = d1.GetFields();
-			System.Collections.IList ff2 = d2.GetFields();
-			
-			SupportClass.CollectionsHelper.Sort(ff1, fieldNameComparator);
-			SupportClass.CollectionsHelper.Sort(ff2, fieldNameComparator);
+			var ff1 = d1.GetFields();
+			var ff2 = d2.GetFields();
+
+		    ff1.OrderBy(x => x.Name());
+		    ff2.OrderBy(x => x.Name());
 			
 			if (ff1.Count != ff2.Count)
 			{
-				System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff1));
-				System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff2));
+                System.Console.Out.WriteLine("[" + String.Join(",", ff1.Select(x => x.ToString())) + "]");
+                System.Console.Out.WriteLine("[" + String.Join(",", ff2.Select(x => x.ToString())) + "]");
 				Assert.AreEqual(ff1.Count, ff2.Count);
 			}
 			
@@ -509,8 +509,8 @@ namespace Lucene.Net.Index
 					if (!s1.Equals(s2))
 					{
 						// print out whole doc on error
-						System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff1));
-						System.Console.Out.WriteLine(SupportClass.CollectionsHelper.CollectionToString(ff2));
+                        System.Console.Out.WriteLine("[" + String.Join(",", ff1.Select(x => x.ToString())) + "]");
+                        System.Console.Out.WriteLine("[" + String.Join(",", ff2.Select(x => x.ToString())) + "]");
 						Assert.AreEqual(s1, s2);
 					}
 				}
@@ -577,7 +577,7 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		internal class IndexingThread:SupportClass.ThreadClass
+		internal class IndexingThread:ThreadClass
 		{
 			internal IndexWriter w;
 			internal int base_Renamed;
@@ -662,7 +662,7 @@ namespace Lucene.Net.Index
 			public virtual System.String GetUTF8String(int nTokens)
 			{
 				int upto = 0;
-				SupportClass.CollectionsHelper.Fill(buffer, (char) 0);
+				CollectionsHelper.Fill(buffer, (char) 0);
 				for (int i = 0; i < nTokens; i++)
 					upto = AddUTF8Token(upto);
 				return new System.String(buffer, 0, upto);
@@ -730,7 +730,7 @@ namespace Lucene.Net.Index
 				
 				if (Lucene.Net.Index.TestStressIndexing2.sameFieldOrder)
 				{
-					SupportClass.CollectionsHelper.Sort(fields, Lucene.Net.Index.TestStressIndexing2.fieldNameComparator);
+					CollectionsHelper.Sort(fields, Lucene.Net.Index.TestStressIndexing2.fieldNameComparator);
 				}
 				else
 				{

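Where the old SupportClass.CollectionsHelper.Sort calls ordered fields by name, the code above now leans on LINQ. One usage note, assuming Fieldable exposes Name() as in the diff: OrderBy is non-destructive, so its result has to be captured (for example with ToList()) before two field lists are compared element by element.

    using System.Collections.Generic;
    using System.Linq;
    using Lucene.Net.Documents;

    internal static class FieldOrderSketch
    {
        // Returns the document's fields ordered by field name. OrderBy builds a
        // new sequence; it does not sort the underlying list in place.
        public static List<Fieldable> FieldsByName(Document d)
        {
            return d.GetFields().OrderBy(f => f.Name()).ToList();
        }
    }
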
Modified: incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestTermVectorsReader.cs Tue Feb 28 22:43:08 2012
@@ -63,7 +63,7 @@ namespace Lucene.Net.Index
             InitBlock();
         }
 		
-		internal class TestToken : System.IComparable
+		internal class TestToken : System.IComparable<TestToken>
 		{
 			public TestToken(TestTermVectorsReader enclosingInstance)
 			{
@@ -86,9 +86,9 @@ namespace Lucene.Net.Index
 			internal int pos;
 			internal int startOffset;
 			internal int endOffset;
-			public virtual int CompareTo(System.Object other)
+			public virtual int CompareTo(TestToken other)
 			{
-				return pos - ((TestToken) other).pos;
+				return pos - other.pos;
 			}
 		}
 		
@@ -147,7 +147,7 @@ namespace Lucene.Net.Index
 			//terms
 			for (int j = 0; j < 5; j++)
 				writer.AddDocument(doc);
-			writer.Flush();
+			writer.Commit();
 			seg = writer.NewestSegment().name;
 			writer.Close();
 			
@@ -178,9 +178,9 @@ namespace Lucene.Net.Index
 			public MyTokenStream(TestTermVectorsReader enclosingInstance)
 			{
 				InitBlock(enclosingInstance);
-				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
-				posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
-				offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
+				termAtt =  AddAttribute<TermAttribute>();
+				posIncrAtt =  AddAttribute<PositionIncrementAttribute>();
+				offsetAtt =  AddAttribute<OffsetAttribute>();
 			}
 			
 			public override bool IncrementToken()
@@ -204,6 +204,11 @@ namespace Lucene.Net.Index
 					return true;
 				}
 			}
+
+		    protected override void Dispose(bool disposing)
+		    {
+		        // do nothing
+		    }
 		}
 		
 		private class MyAnalyzer:Analyzer
@@ -350,12 +355,12 @@ namespace Lucene.Net.Index
 			Assert.IsTrue(reader != null);
 			SortedTermVectorMapper mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
 			reader.Get(0, mapper);
-			System.Collections.Generic.SortedDictionary<Object,Object> set_Renamed = mapper.GetTermVectorEntrySet();
+			var set_Renamed = mapper.GetTermVectorEntrySet();
 			Assert.IsTrue(set_Renamed != null, "set is null and it shouldn't be");
 			//three fields, 4 terms, all terms are the same
 			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
 			//Check offsets and positions
-			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
+			for (System.Collections.IEnumerator iterator = set_Renamed.GetEnumerator(); iterator.MoveNext(); )
 			{
 				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
 				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -370,7 +375,7 @@ namespace Lucene.Net.Index
 			//three fields, 4 terms, all terms are the same
 			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
 			//Should have offsets and positions b/c we are munging all the fields together
-			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
+			for (System.Collections.IEnumerator iterator = set_Renamed.GetEnumerator(); iterator.MoveNext(); )
 			{
 				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
 				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -381,16 +386,16 @@ namespace Lucene.Net.Index
 			
 			FieldSortedTermVectorMapper fsMapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
 			reader.Get(0, fsMapper);
-			System.Collections.IDictionary map = fsMapper.GetFieldToTerms();
+			var map = fsMapper.GetFieldToTerms();
 			Assert.IsTrue(map.Count == testFields.Length, "map Size: " + map.Count + " is not: " + testFields.Length);
-			for (System.Collections.IEnumerator iterator = new System.Collections.Hashtable(map).GetEnumerator(); iterator.MoveNext(); )
+			for (var iterator = map.GetEnumerator(); iterator.MoveNext(); )
 			{
-				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
-				System.Collections.Generic.SortedDictionary<Object,Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object,Object>)entry.Value;
+				var entry = iterator.Current;
+				var sortedSet = entry.Value;
 				Assert.IsTrue(sortedSet.Count == 4, "sortedSet Size: " + sortedSet.Count + " is not: " + 4);
-				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
+				for (var inner = sortedSet.GetEnumerator(); inner.MoveNext(); )
 				{
-					TermVectorEntry tve = (TermVectorEntry) inner.Current;
+					TermVectorEntry tve = inner.Current;
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
 					//Check offsets and positions.
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -416,14 +421,14 @@ namespace Lucene.Net.Index
 			reader.Get(0, fsMapper);
 			map = fsMapper.GetFieldToTerms();
 			Assert.IsTrue(map.Count == testFields.Length, "map Size: " + map.Count + " is not: " + testFields.Length);
-			for (System.Collections.IEnumerator iterator = new System.Collections.Hashtable(map).GetEnumerator(); iterator.MoveNext(); )
+			for (var iterator = map.GetEnumerator(); iterator.MoveNext(); )
 			{
-				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
-				System.Collections.Generic.SortedDictionary<Object,Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object,Object>)entry.Value;
+				var entry = iterator.Current;
+				var sortedSet = entry.Value;
 				Assert.IsTrue(sortedSet.Count == 4, "sortedSet Size: " + sortedSet.Count + " is not: " + 4);
-				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
+				for (var inner = sortedSet.GetEnumerator(); inner.MoveNext(); )
 				{
-					TermVectorEntry tve = (TermVectorEntry) inner.Current;
+					TermVectorEntry tve = inner.Current;
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
 					//Check offsets and positions.
 					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
@@ -446,7 +451,7 @@ namespace Lucene.Net.Index
 			}
 			
 			// test setDocumentNumber()
-			IndexReader ir = IndexReader.Open(dir);
+		    IndexReader ir = IndexReader.Open(dir, true);
 			DocNumAwareMapper docNumAwareMapper = new DocNumAwareMapper();
 			Assert.AreEqual(- 1, docNumAwareMapper.GetDocumentNumber());
 			

Modified: incubator/lucene.net/trunk/test/core/Index/TestTermdocPerf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestTermdocPerf.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestTermdocPerf.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestTermdocPerf.cs Tue Feb 28 22:43:08 2012
@@ -43,7 +43,7 @@ namespace Lucene.Net.Index
 		public RepeatingTokenStream(System.String val)
 		{
 			this.value_Renamed = val;
-			this.termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+			this.termAtt =  AddAttribute<TermAttribute>();
 		}
 		
 		public override bool IncrementToken()
@@ -57,6 +57,11 @@ namespace Lucene.Net.Index
 			}
 			return false;
 		}
+
+	    protected override void Dispose(bool disposing)
+	    {
+	        // Do Nothing
+	    }
 	}
 	
 	
@@ -130,8 +135,8 @@ namespace Lucene.Net.Index
 			AddDocs(dir, ndocs, "foo", "val", maxTF, percentDocs);
 			long end = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
 			System.Console.Out.WriteLine("milliseconds for creation of " + ndocs + " docs = " + (end - start));
-			
-			IndexReader reader = IndexReader.Open(dir);
+
+		    IndexReader reader = IndexReader.Open(dir, true);
 			TermEnum tenum = reader.Terms(new Term("foo", "val"));
 			TermDocs tdocs = reader.TermDocs();
 			

Modified: incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestThreadedOptimize.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -36,7 +36,7 @@ namespace Lucene.Net.Index
     [TestFixture]
 	public class TestThreadedOptimize:LuceneTestCase
 	{
-		private class AnonymousClassThread:SupportClass.ThreadClass
+		private class AnonymousClassThread:ThreadClass
 		{
 			public AnonymousClassThread(Lucene.Net.Index.IndexWriter writerFinal, int iFinal, int iterFinal, TestThreadedOptimize enclosingInstance)
 			{
@@ -83,7 +83,7 @@ namespace Lucene.Net.Index
 				catch (System.Exception t)
 				{
 					Enclosing_Instance.setFailed();
-					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": hit exception");
+					System.Console.Out.WriteLine(ThreadClass.Current().Name + ": hit exception");
 					System.Console.Out.WriteLine(t.StackTrace);
 				}
 			}
@@ -107,10 +107,10 @@ namespace Lucene.Net.Index
 			failed = true;
 		}
 		
-		public virtual void  runTest(Directory directory, bool autoCommit, MergeScheduler merger)
+		public virtual void  runTest(Directory directory, MergeScheduler merger)
 		{
 			
-			IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
+			IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.SetMaxBufferedDocs(2);
 			if (merger != null)
 				writer.SetMergeScheduler(merger);
@@ -132,9 +132,7 @@ namespace Lucene.Net.Index
 				writer.SetMergeFactor(4);
 				//writer.setInfoStream(System.out);
 				
-				int docCount = writer.DocCount();
-				
-				SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[NUM_THREADS];
+				ThreadClass[] threads = new ThreadClass[NUM_THREADS];
 				
 				for (int i = 0; i < NUM_THREADS; i++)
 				{
@@ -155,16 +153,13 @@ namespace Lucene.Net.Index
 				
 				// System.out.println("TEST: now index=" + writer.segString());
 				
-				Assert.AreEqual(expectedDocCount, writer.DocCount());
-				
-				if (!autoCommit)
-				{
-					writer.Close();
-					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
-					writer.SetMaxBufferedDocs(2);
-				}
+				Assert.AreEqual(expectedDocCount, writer.MaxDoc());
 				
-				IndexReader reader = IndexReader.Open(directory);
+				writer.Close();
+				writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+				writer.SetMaxBufferedDocs(2);
+
+			    IndexReader reader = IndexReader.Open(directory, true);
 				Assert.IsTrue(reader.IsOptimized());
 				Assert.AreEqual(expectedDocCount, reader.NumDocs());
 				reader.Close();
@@ -180,22 +175,18 @@ namespace Lucene.Net.Index
 		public virtual void  TestThreadedOptimize_Renamed()
 		{
 			Directory directory = new MockRAMDirectory();
-			runTest(directory, false, new SerialMergeScheduler());
-			runTest(directory, true, new SerialMergeScheduler());
-			runTest(directory, false, new ConcurrentMergeScheduler());
-			runTest(directory, true, new ConcurrentMergeScheduler());
+			runTest(directory, new SerialMergeScheduler());
+			runTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			
-			System.String tempDir = SupportClass.AppSettings.Get("tempDir", "");
+			System.String tempDir = AppSettings.Get("tempDir", "");
 			if (tempDir == null)
 				throw new System.IO.IOException("tempDir undefined, cannot run test");
 			
 			System.String dirName = tempDir + "/luceneTestThreadedOptimize";
-			directory = FSDirectory.Open(new System.IO.FileInfo(dirName));
-			runTest(directory, false, new SerialMergeScheduler());
-			runTest(directory, true, new SerialMergeScheduler());
-			runTest(directory, false, new ConcurrentMergeScheduler());
-			runTest(directory, true, new ConcurrentMergeScheduler());
+			directory = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
+			runTest(directory, new SerialMergeScheduler());
+			runTest(directory, new ConcurrentMergeScheduler());
 			directory.Close();
 			_TestUtil.RmDir(dirName);
 		}

Modified: incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestTransactionRollback.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using System.Collections.Generic;
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -51,7 +52,7 @@ namespace Lucene.Net.Index
 			// System.out.println("Attempting to rollback to "+id);
 			System.String ids = "-" + id;
 			IndexCommit last = null;
-			System.Collections.ICollection commits = IndexReader.ListCommits(dir);
+			var commits = IndexReader.ListCommits(dir);
 			for (System.Collections.IEnumerator iterator = commits.GetEnumerator(); iterator.MoveNext(); )
 			{
 				IndexCommit commit = (IndexCommit) iterator.Current;
@@ -89,7 +90,7 @@ namespace Lucene.Net.Index
 		
 		private void  CheckExpecteds(System.Collections.BitArray expecteds)
 		{
-			IndexReader r = IndexReader.Open(dir);
+			IndexReader r = IndexReader.Open(dir, true);
 			
 			//Perhaps not the most efficient approach but meets our needs here.
 			for (int i = 0; i < r.MaxDoc(); i++)
@@ -106,7 +107,7 @@ namespace Lucene.Net.Index
 				}
 			}
 			r.Close();
-			Assert.AreEqual(0, SupportClass.BitSetSupport.Cardinality(expecteds), "Should have 0 docs remaining ");
+			Assert.AreEqual(0, BitSetSupport.Cardinality(expecteds), "Should have 0 docs remaining ");
 		}
 		
 		/*
@@ -175,11 +176,11 @@ namespace Lucene.Net.Index
 				this.rollbackPoint = rollbackPoint;
 			}
 			
-			public virtual void  OnCommit(System.Collections.IList commits)
+			public virtual void  OnCommit<T>(IList<T> commits) where T : IndexCommit
 			{
 			}
-			
-			public virtual void  OnInit(System.Collections.IList commits)
+
+            public virtual void OnInit<T>(IList<T> commits) where T : IndexCommit
 			{
 				for (System.Collections.IEnumerator iterator = commits.GetEnumerator(); iterator.MoveNext(); )
 				{
@@ -231,14 +232,14 @@ namespace Lucene.Net.Index
 				}
 				
 			}
-			
-			public virtual void  OnCommit(System.Collections.IList commits)
+
+            public virtual void OnCommit<T>(IList<T> commits) where T : IndexCommit
 			{
 			}
 			
-			public virtual void  OnInit(System.Collections.IList commits)
+			public virtual void  OnInit<T>(IList<T> commits) where T : IndexCommit
 			{
-				((IndexCommit) commits[commits.Count - 1]).Delete();
+				commits[commits.Count - 1].Delete();
 			}
 		}
 		
@@ -250,7 +251,7 @@ namespace Lucene.Net.Index
 				// Unless you specify a prior commit point, rollback
 				// should not work:
 				new IndexWriter(dir, new WhitespaceAnalyzer(), new DeleteLastCommitPolicy(this), MaxFieldLength.UNLIMITED).Close();
-				IndexReader r = IndexReader.Open(dir);
+			    IndexReader r = IndexReader.Open(dir, true);
 				Assert.AreEqual(100, r.NumDocs());
 				r.Close();
 			}
@@ -276,10 +277,10 @@ namespace Lucene.Net.Index
 				}
 				
 			}
-			public virtual void  OnCommit(System.Collections.IList commits)
+            public virtual void OnCommit<T>(IList<T> commits) where T : IndexCommit
 			{
 			}
-			public virtual void  OnInit(System.Collections.IList commits)
+            public virtual void OnInit<T>(IList<T> commits) where T : IndexCommit
 			{
 			}
 		}
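
Two API shifts recur in the TestTransactionRollback.cs hunks above: readers are opened with an explicit readOnly flag, and the deletion-policy callbacks are generic over the commit type. A minimal sketch under those assumptions (IndexDeletionPolicy is taken to be the interface these policies implement); KeepOnlyLastCommit, RollbackSketch and CountDocs are illustrative names, not code from this commit:

    using System.Collections.Generic;
    using Lucene.Net.Analysis;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    // Keeps only the newest commit point, mirroring the OnInit<T>/OnCommit<T> shape above.
    internal class KeepOnlyLastCommit : IndexDeletionPolicy
    {
        public void OnInit<T>(IList<T> commits) where T : IndexCommit
        {
            for (int i = 0; i < commits.Count - 1; i++)
                commits[i].Delete();
        }

        public void OnCommit<T>(IList<T> commits) where T : IndexCommit
        {
            OnInit(commits);
        }
    }

    internal static class RollbackSketch
    {
        public static int CountDocs(Directory dir)
        {
            // Apply the policy on open/close, then read back with the explicit read-only flag.
            new IndexWriter(dir, new WhitespaceAnalyzer(), new KeepOnlyLastCommit(),
                            IndexWriter.MaxFieldLength.UNLIMITED).Close();

            IndexReader r = IndexReader.Open(dir, true);   // true = read-only
            try { return r.NumDocs(); }
            finally { r.Close(); }
        }
    }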

Modified: incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestTransactions.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using Lucene.Net.Analysis;
@@ -61,7 +61,7 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		abstract public class TimedThread:SupportClass.ThreadClass
+		abstract public class TimedThread:ThreadClass
 		{
 			internal bool failed;
 			private static int RUN_TIME_SEC = 6;
@@ -85,7 +85,7 @@ namespace Lucene.Net.Index
 				}
 				catch (System.Exception e)
 				{
-					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current() + ": exc");
+					System.Console.Out.WriteLine(ThreadClass.Current() + ": exc");
 					System.Console.Out.WriteLine(e.StackTrace);
 					failed = true;
 				}
@@ -225,8 +225,8 @@ namespace Lucene.Net.Index
 				IndexReader r1, r2;
 				lock (lock_Renamed)
 				{
-					r1 = IndexReader.Open(dir1);
-					r2 = IndexReader.Open(dir2);
+					r1 = IndexReader.Open(dir1, true);
+				    r2 = IndexReader.Open(dir2, true);
 				}
 				if (r1.NumDocs() != r2.NumDocs())
 					throw new System.SystemException("doc counts differ: r1=" + r1.NumDocs() + " r2=" + r2.NumDocs());
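
The ThreadClass wrapper used by TimedThread above now comes from Lucene.Net.Support rather than the old SupportClass holder. A small sketch of a worker built on it, assuming the wrapper still exposes a virtual Run() plus Start()/Join(); WorkerThread is an illustrative name, not a type from this commit:

    using System;
    using Lucene.Net.Support;

    internal class WorkerThread : ThreadClass
    {
        internal bool Failed;

        public override void Run()
        {
            try
            {
                // Real work goes here; the test's TimedThread loops until a deadline.
            }
            catch (Exception e)
            {
                // Same logging idiom as the hunk above.
                Console.Out.WriteLine(ThreadClass.Current() + ": exc");
                Console.Out.WriteLine(e.StackTrace);
                Failed = true;
            }
        }
    }

    // Usage (illustrative): var t = new WorkerThread(); t.Start(); t.Join();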

Modified: incubator/lucene.net/trunk/test/core/Index/TestWordlistLoader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestWordlistLoader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestWordlistLoader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestWordlistLoader.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using NUnit.Framework;
 
 using WordlistLoader = Lucene.Net.Analysis.WordlistLoader;
@@ -33,9 +33,9 @@ namespace Lucene.Net.Index
 		public virtual void  TestWordlistLoading()
 		{
 			System.String s = "ONE\n  two \nthree";
-			System.Collections.Hashtable wordSet1 = WordlistLoader.GetWordSet(new System.IO.StringReader(s));
+			var wordSet1 = WordlistLoader.GetWordSet(new System.IO.StringReader(s));
 			CheckSet(wordSet1);
-			System.Collections.Hashtable wordSet2 = WordlistLoader.GetWordSet(new System.IO.StringReader(s));
+			var wordSet2 = WordlistLoader.GetWordSet(new System.IO.StringReader(s));
 			CheckSet(wordSet2);
 		}
 		
@@ -43,14 +43,14 @@ namespace Lucene.Net.Index
 		public virtual void  TestComments()
 		{
 			System.String s = "ONE\n  two \nthree\n#comment";
-			System.Collections.Hashtable wordSet1 = WordlistLoader.GetWordSet(new System.IO.StringReader(s), "#");
+			var wordSet1 = WordlistLoader.GetWordSet(new System.IO.StringReader(s), "#");
 			CheckSet(wordSet1);
 			Assert.IsFalse(wordSet1.Contains("#comment"));
 			Assert.IsFalse(wordSet1.Contains("comment"));
 		}
 		
 		
-		private void  CheckSet(System.Collections.Hashtable wordset)
+		private void  CheckSet(HashSet<string> wordset)
 		{
 			Assert.AreEqual(3, wordset.Count);
 			Assert.IsTrue(wordset.Contains("ONE")); // case is not modified
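
The hunks above move WordlistLoader.GetWordSet from a non-generic Hashtable to a generic string set (CheckSet now takes HashSet<string>). A minimal sketch of that usage, assuming the loader returns HashSet<string> as the updated CheckSet signature indicates; WordlistSketch and the inline word list are illustrative:

    using System.Collections.Generic;
    using System.IO;
    using WordlistLoader = Lucene.Net.Analysis.WordlistLoader;

    internal static class WordlistSketch
    {
        public static HashSet<string> Load()
        {
            // "#" marks comment lines, which the loader skips; word case is preserved.
            var words = WordlistLoader.GetWordSet(new StringReader("ONE\n  two \nthree\n#comment"), "#");
            return words;
        }
    }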

Modified: incubator/lucene.net/trunk/test/core/Lucene.Net.Test.csproj
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Lucene.Net.Test.csproj?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Lucene.Net.Test.csproj (original)
+++ incubator/lucene.net/trunk/test/core/Lucene.Net.Test.csproj Tue Feb 28 22:43:08 2012
@@ -19,7 +19,6 @@
  under the License.
 
 -->
-
 <Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
   <PropertyGroup>
     <ProjectType>Local</ProjectType>
@@ -33,8 +32,7 @@
     <AssemblyKeyContainerName>
     </AssemblyKeyContainerName>
     <AssemblyName>Lucene.Net.Test</AssemblyName>
-    <AssemblyOriginatorKeyFile>
-    </AssemblyOriginatorKeyFile>
+    <AssemblyOriginatorKeyFile>Lucene.Net.snk</AssemblyOriginatorKeyFile>
     <DefaultClientScript>JScript</DefaultClientScript>
     <DefaultHTMLPageLayout>Grid</DefaultHTMLPageLayout>
     <DefaultTargetSchema>IE50</DefaultTargetSchema>
@@ -99,7 +97,7 @@
     <DefineConstants>TRACE;SHARP_ZIP_LIB</DefineConstants>
     <DocumentationFile>
     </DocumentationFile>
-    <DebugSymbols>false</DebugSymbols>
+    <DebugSymbols>true</DebugSymbols>
     <FileAlignment>4096</FileAlignment>
     <NoStdLib>false</NoStdLib>
     <NoWarn>618</NoWarn>
@@ -108,11 +106,14 @@
     <RemoveIntegerChecks>false</RemoveIntegerChecks>
     <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
     <WarningLevel>4</WarningLevel>
-    <DebugType>none</DebugType>
+    <DebugType>pdbonly</DebugType>
     <ErrorReport>prompt</ErrorReport>
   </PropertyGroup>
   <PropertyGroup>
-  	<DefineConstants>$(DefineConstants);$(ExternalConstants)</DefineConstants>
+    <DefineConstants>$(DefineConstants);$(ExternalConstants)</DefineConstants>
+  </PropertyGroup>
+  <PropertyGroup>
+    <SignAssembly>true</SignAssembly>
   </PropertyGroup>
   <ItemGroup>
     <Reference Include="ICSharpCode.SharpZipLib, Version=0.85.5.452, Culture=neutral, processorArchitecture=MSIL">
@@ -130,12 +131,14 @@
     <Reference Include="System">
       <Name>System</Name>
     </Reference>
+    <Reference Include="System.Configuration" />
     <Reference Include="System.Data">
       <Name>System.Data</Name>
     </Reference>
     <Reference Include="System.Runtime.Remoting">
       <Name>system.runtime.remoting</Name>
     </Reference>
+    <Reference Include="System.Web.Extensions" />
     <Reference Include="System.Xml">
       <Name>System.XML</Name>
     </Reference>
@@ -149,7 +152,6 @@
     </Compile>
     <Compile Include="Analysis\TestASCIIFoldingFilter.cs" />
     <Compile Include="Analysis\TestCachingTokenFilter.cs" />
-    <Compile Include="Analysis\TestCharacterCache.cs" />
     <Compile Include="Analysis\TestCharArraySet.cs" />
     <Compile Include="Analysis\TestCharFilter.cs" />
     <Compile Include="Analysis\TestISOLatin1AccentFilter.cs">
@@ -176,9 +178,7 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Analysis\TestTeeSinkTokenFilter.cs" />
-    <Compile Include="Analysis\TestTeeTokenFilter.cs" />
     <Compile Include="Analysis\TestToken.cs" />
-    <Compile Include="Analysis\TestTokenStreamBWComp.cs" />
     <Compile Include="AssemblyInfo.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -223,15 +223,13 @@
     <Compile Include="Index\TestFilterIndexReader.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Index\TestIndexCommit.cs" />
     <Compile Include="Index\TestIndexFileDeleter.cs">
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Index\TestIndexInput.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="Index\TestIndexModifier.cs">
-      <SubType>Code</SubType>
-    </Compile>
     <Compile Include="Index\TestIndexReader.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -329,15 +327,12 @@
     <Compile Include="Search\Function\TestDocValues.cs" />
     <Compile Include="Search\Function\TestFieldScoreQuery.cs" />
     <Compile Include="Search\Function\TestOrdValues.cs" />
-    <Compile Include="Search\Function\TestValueSource.cs" />
     <Compile Include="Search\JustCompileSearch.cs" />
     <Compile Include="Search\MockFilter.cs" />
     <Compile Include="Search\Payloads\PayloadHelper.cs" />
-    <Compile Include="Search\Payloads\TestBoostingTermQuery.cs" />
     <Compile Include="Search\Payloads\TestPayloadNearQuery.cs" />
     <Compile Include="Search\Payloads\TestPayloadTermQuery.cs" />
     <Compile Include="Search\QueryUtils.cs" />
-    <Compile Include="Search\SampleComparable.cs" />
     <Compile Include="Search\SingleDocTestFilter.cs" />
     <Compile Include="Search\Spans\JustCompileSearchSpans.cs" />
     <Compile Include="Search\Spans\TestBasics.cs">
@@ -409,7 +404,6 @@
     <Compile Include="Search\TestQueryWrapperFilter.cs" />
     <Compile Include="Search\TestScoreCachingWrappingScorer.cs" />
     <Compile Include="Search\TestScorerPerf.cs" />
-    <Compile Include="Search\TestSearchHitsWithDeletions.cs" />
     <Compile Include="Search\TestSetNorm.cs" />
     <Compile Include="Search\TestSimilarity.cs" />
     <Compile Include="Search\TestSimpleExplanations.cs" />
@@ -417,13 +411,11 @@
     <Compile Include="Search\TestSloppyPhraseQuery.cs" />
     <Compile Include="Search\TestSort.cs" />
     <Compile Include="Search\TestSpanQueryFilter.cs" />
-    <Compile Include="Search\TestStressSort.cs" />
     <Compile Include="Search\TestTermRangeFilter.cs" />
     <Compile Include="Search\TestTermRangeQuery.cs" />
     <Compile Include="Search\TestTermScorer.cs" />
     <Compile Include="Search\TestTermVectors.cs" />
     <Compile Include="Search\TestThreadSafe.cs" />
-    <Compile Include="Search\TestTimeLimitedCollector.cs" />
     <Compile Include="Search\TestTimeLimitingCollector.cs" />
     <Compile Include="Search\TestTopDocsCollector.cs" />
     <Compile Include="Search\TestTopScoreDocCollector.cs" />
@@ -447,6 +439,7 @@
     <Compile Include="Store\TestLockFactory.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Store\TestMultiMMap.cs" />
     <Compile Include="Store\TestRAMDirectory.cs" />
     <Compile Include="Store\TestWindowsMMap.cs">
       <SubType>Code</SubType>
@@ -455,12 +448,26 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="SupportClassException.cs" />
+    <Compile Include="Support\BigObject.cs" />
+    <Compile Include="Support\CollisionTester.cs" />
+    <Compile Include="Support\SmallObject.cs" />
+    <Compile Include="Support\TestCase.cs" />
+    <Compile Include="Support\TestCloseableThreadLocal.cs" />
+    <Compile Include="Support\TestEquatableList.cs" />
+    <Compile Include="Support\TestHashMap.cs" />
+    <Compile Include="Support\TestIDisposable.cs" />
+    <Compile Include="Support\TestLRUCache.cs" />
+    <Compile Include="Support\TestOldPatches.cs" />
+    <Compile Include="Support\TestOSClass.cs" />
+    <Compile Include="Support\TestSerialization.cs" />
+    <Compile Include="Support\TestThreadClass.cs" />
+    <Compile Include="Support\TestWeakHashTable.cs" />
+    <Compile Include="Support\TestWeakHashTableBehavior.cs" />
+    <Compile Include="Support\TestWeakHashTableMultiThreadAccess.cs" />
+    <Compile Include="Support\TestWeakHashTablePerformance.cs" />
     <Compile Include="TestDemo.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="TestHitIterator.cs">
-      <SubType>Code</SubType>
-    </Compile>
     <Compile Include="TestMergeSchedulerExternal.cs" />
     <Compile Include="TestSearch.cs">
       <SubType>Code</SubType>
@@ -468,7 +475,7 @@
     <Compile Include="TestSearchForDuplicates.cs">
       <SubType>Code</SubType>
     </Compile>
-    <Compile Include="TestSupportClass.cs" />
+    <Compile Include="Support\TestSupportClass.cs" />
     <Compile Include="Util\ArrayUtilTest.cs" />
     <Compile Include="Util\Cache\TestSimpleLRUCache.cs" />
     <Compile Include="Util\English.cs">
@@ -498,6 +505,7 @@
       <SubType>Code</SubType>
     </Compile>
     <Compile Include="Util\TestStringIntern.cs" />
+    <Compile Include="Util\TestVersion.cs" />
     <Compile Include="Util\_TestUtil.cs">
       <SubType>Code</SubType>
     </Compile>
@@ -514,6 +522,9 @@
     <None Include="Index\index.23.nocfs.zip" />
     <None Include="Index\index.24.cfs.zip" />
     <None Include="Index\index.24.nocfs.zip" />
+    <None Include="Index\index.29.cfs.zip" />
+    <None Include="Index\index.29.nocfs.zip" />
+    <None Include="Lucene.Net.snk" />
   </ItemGroup>
   <ItemGroup>
     <EmbeddedResource Include="Messages\MessagesTestBundle.ja.resources">
@@ -555,6 +566,9 @@
       <Install>true</Install>
     </BootstrapperPackage>
   </ItemGroup>
+  <ItemGroup>
+    <Content Include="UpdatedTests.txt" />
+  </ItemGroup>
   <Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
   <PropertyGroup>
     <PreBuildEvent>

Modified: incubator/lucene.net/trunk/test/core/Messages/MessagesTestBundle.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Messages/MessagesTestBundle.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Messages/MessagesTestBundle.cs (original)
+++ incubator/lucene.net/trunk/test/core/Messages/MessagesTestBundle.cs Tue Feb 28 22:43:08 2012
@@ -42,7 +42,7 @@ namespace Lucene.Net.Messages
 			{
 				// register all string ids with NLS class and initialize static string
 				// values
-				NLS.InitializeMessages(BUNDLE_NAME, typeof(MessagesTestBundle));
+                NLS.InitializeMessages<MessagesTestBundle>(BUNDLE_NAME);
 			}
 		}
 	}
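
NLS bundles now register themselves through the generic InitializeMessages<T> overload shown above, and message formatting takes its arguments as params (see the TestNLS.cs hunks below). A minimal sketch of a bundle under those assumptions; MyMessages, its bundle name, and the existence of a matching .resources file are illustrative, not part of this commit:

    using Lucene.Net.Messages;

    public class MyMessages : NLS
    {
        private static readonly string BUNDLE_NAME = typeof(MyMessages).FullName;

        // Message keys are filled in by NLS via reflection at initialization time.
        public static string Q0001E_INVALID_SYNTAX;

        static MyMessages()
        {
            NLS.InitializeMessages<MyMessages>(BUNDLE_NAME);
        }
    }

    // Formatting then takes the arguments directly, e.g.:
    //   NLS.GetLocalizedMessage(MyMessages.Q0001E_INVALID_SYNTAX, "XXX");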

Modified: incubator/lucene.net/trunk/test/core/Messages/TestNLS.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Messages/TestNLS.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Messages/TestNLS.cs (original)
+++ incubator/lucene.net/trunk/test/core/Messages/TestNLS.cs Tue Feb 28 22:43:08 2012
@@ -29,14 +29,14 @@ namespace Lucene.Net.Messages
 		[Test]
 		public virtual void  TestMessageLoading()
 		{
-			Message invalidSyntax = new MessageImpl(MessagesTestBundle.Q0001E_INVALID_SYNTAX, new System.Object[]{"XXX"});
+			Message invalidSyntax = new MessageImpl(MessagesTestBundle.Q0001E_INVALID_SYNTAX, "XXX");
             Assert.AreEqual("Syntax Error: XXX", invalidSyntax.GetLocalizedMessage());
 		}
 		
 		[Test]
 		public virtual void  TestMessageLoading_ja()
 		{
-			Message invalidSyntax = new MessageImpl(MessagesTestBundle.Q0001E_INVALID_SYNTAX, new System.Object[]{"XXX"});
+			Message invalidSyntax = new MessageImpl(MessagesTestBundle.Q0001E_INVALID_SYNTAX, "XXX");
 			Assert.AreEqual("構文エラー: XXX", invalidSyntax.GetLocalizedMessage(new System.Globalization.CultureInfo("ja")));
 		}
 		
@@ -46,7 +46,7 @@ namespace Lucene.Net.Messages
 			System.String message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0004E_INVALID_SYNTAX_ESCAPE_UNICODE_TRUNCATION);
 			Assert.AreEqual("Truncated unicode escape sequence.", message);
 			
-			message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0001E_INVALID_SYNTAX, new System.Object[]{"XXX"});
+			message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0001E_INVALID_SYNTAX, "XXX");
 			Assert.AreEqual("Syntax Error: XXX", message);
 		}
 		
@@ -56,7 +56,7 @@ namespace Lucene.Net.Messages
 			System.String message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0004E_INVALID_SYNTAX_ESCAPE_UNICODE_TRUNCATION, new System.Globalization.CultureInfo("ja"));
 			Assert.AreEqual("切り捨てられたユニコード・エスケープ・シーケンス。", message);
 			
-			message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0001E_INVALID_SYNTAX, new System.Globalization.CultureInfo("ja"), new System.Object[]{"XXX"});
+			message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0001E_INVALID_SYNTAX, new System.Globalization.CultureInfo("ja"), "XXX");
 			Assert.AreEqual("構文エラー: XXX", message);
 		}
 		
@@ -75,7 +75,7 @@ namespace Lucene.Net.Messages
 			System.String message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0004E_INVALID_SYNTAX_ESCAPE_UNICODE_TRUNCATION, locale);
 			Assert.AreEqual("Truncated unicode escape sequence.", message);
 			
-			message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0001E_INVALID_SYNTAX, locale, new System.Object[]{"XXX"});
+			message = NLS.GetLocalizedMessage(MessagesTestBundle.Q0001E_INVALID_SYNTAX, locale, "XXX");
 			Assert.AreEqual("Syntax Error: XXX", message);
 		}
 		

Modified: incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/test/core/QueryParser/TestMultiAnalyzer.cs Tue Feb 28 22:43:08 2012
@@ -49,7 +49,7 @@ namespace Lucene.Net.QueryParsers
 		public virtual void  TestMultiAnalyzer_Rename()
 		{
 			
-			QueryParser qp = new QueryParser("", new MultiAnalyzer(this));
+			QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "", new MultiAnalyzer(this));
 			
 			// trivial, no multiple tokens:
 			Assert.AreEqual("foo", qp.Parse("foo").ToString());
@@ -115,7 +115,7 @@ namespace Lucene.Net.QueryParsers
 		[Test]
 		public virtual void  TestPosIncrementAnalyzer()
 		{
-			QueryParser qp = new QueryParser("", new PosIncrementAnalyzer(this));
+            QueryParser qp = new QueryParser(Util.Version.LUCENE_24, "", new PosIncrementAnalyzer(this));
 			Assert.AreEqual("quick brown", qp.Parse("the quick brown").ToString());
 			Assert.AreEqual("\"quick brown\"", qp.Parse("\"the quick brown\"").ToString());
 			Assert.AreEqual("quick brown fox", qp.Parse("the quick brown fox").ToString());
@@ -148,7 +148,7 @@ namespace Lucene.Net.QueryParsers
 			
 			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
-				TokenStream result = new StandardTokenizer(reader);
+				TokenStream result = new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader);
 				result = new TestFilter(enclosingInstance, result);
 				result = new LowerCaseFilter(result);
 				return result;
@@ -183,10 +183,10 @@ namespace Lucene.Net.QueryParsers
 			public TestFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed):base(in_Renamed)
 			{
 				InitBlock(enclosingInstance);
-				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
-				posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
-				offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
-				typeAtt = (TypeAttribute) AddAttribute(typeof(TypeAttribute));
+				termAtt =  AddAttribute<TermAttribute>();
+				posIncrAtt =  AddAttribute<PositionIncrementAttribute>();
+				offsetAtt =  AddAttribute<OffsetAttribute>();
+				typeAtt =  AddAttribute<TypeAttribute>();
 			}
 			
 			public override bool IncrementToken()
@@ -255,7 +255,7 @@ namespace Lucene.Net.QueryParsers
 			
 			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
-				TokenStream result = new StandardTokenizer(reader);
+				TokenStream result = new StandardTokenizer(Util.Version.LUCENE_CURRENT, reader);
 				result = new TestPosIncrementFilter(enclosingInstance, result);
 				result = new LowerCaseFilter(result);
 				return result;
@@ -284,8 +284,8 @@ namespace Lucene.Net.QueryParsers
 			public TestPosIncrementFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed):base(in_Renamed)
 			{
 				InitBlock(enclosingInstance);
-				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
-				posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
+				termAtt =  AddAttribute<TermAttribute>();
+				posIncrAtt =  AddAttribute<PositionIncrementAttribute>();
 			}
 			
 			public override bool IncrementToken()
@@ -315,7 +315,7 @@ namespace Lucene.Net.QueryParsers
 		private sealed class DumbQueryParser:QueryParser
 		{
 			
-			public DumbQueryParser(System.String f, Analyzer a):base(f, a)
+			public DumbQueryParser(System.String f, Analyzer a):base(Util.Version.LUCENE_CURRENT, f, a)
 			{
 			}
 			



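The TestMultiAnalyzer.cs hunks above show two recurring migrations: version-aware constructors for QueryParser and StandardTokenizer, and generic AddAttribute<T>() in place of the typeof(...) casts. A minimal sketch under those assumptions; PassThroughFilter and VersionedParsingSketch are illustrative names, not code from this commit:

    using System.IO;
    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Analysis.Tokenattributes;
    using Lucene.Net.QueryParsers;
    using Version = Lucene.Net.Util.Version;

    // A do-nothing filter showing the generic attribute registration.
    internal class PassThroughFilter : TokenFilter
    {
        private readonly TokenStream source;
        private readonly TermAttribute termAtt;
        private readonly PositionIncrementAttribute posIncrAtt;

        public PassThroughFilter(TokenStream input) : base(input)
        {
            source = input;
            termAtt = AddAttribute<TermAttribute>();
            posIncrAtt = AddAttribute<PositionIncrementAttribute>();
        }

        public override bool IncrementToken()
        {
            return source.IncrementToken();
        }
    }

    internal static class VersionedParsingSketch
    {
        // Version-aware parser construction, as in the hunks above.
        public static string ParseOnce(string query, Analyzer analyzer)
        {
            var qp = new QueryParser(Version.LUCENE_CURRENT, "", analyzer);
            return qp.Parse(query).ToString();
        }

        // Version-aware tokenizer feeding the filter above.
        public static TokenStream BuildStream(TextReader reader)
        {
            TokenStream result = new StandardTokenizer(Version.LUCENE_CURRENT, reader);
            result = new PassThroughFilter(result);
            return new LowerCaseFilter(result);
        }
    }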