lucenenet-commits mailing list archives

From: ccurr...@apache.org
Subject: [Lucene.Net] svn commit: r1294875 [34/45] - in /incubator/lucene.net/trunk: ./ build/ build/vs2010/contrib/ build/vs2010/test/ doc/ src/ src/contrib/Analyzers/ src/contrib/Analyzers/AR/ src/contrib/Analyzers/BR/ src/contrib/Analyzers/CJK/ src/contrib/Analyzers/Cn/ ...
Date: Tue, 28 Feb 2012 22:43:28 GMT
Modified: incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestBackwardsCompatibility.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,10 @@
  */
 
 using System;
-
+using System.IO;
+using System.Linq;
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -62,6 +65,13 @@ namespace Lucene.Net.Index
 		}
 		*/
 
+        public class AnonymousFieldSelector : FieldSelector
+        {
+            public FieldSelectorResult Accept(string fieldName)
+            {
+                return ("compressed".Equals(fieldName)) ? FieldSelectorResult.SIZE : FieldSelectorResult.LOAD;
+            }
+        }
 
 		
 		/* Unzips dirName + ".zip" --> dirName, removing dirName
@@ -119,25 +129,124 @@ namespace Lucene.Net.Index
 			RmDir(dirName);
 		}
 
-        internal System.String[] oldNames = new System.String[]{"19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs", "21.nocfs", "22.cfs", "22.nocfs", "23.cfs", "23.nocfs", "24.cfs", "24.nocfs","30.cfs","30.nocfs"};
-		
-		[Test]
+	    internal string[] oldNames = new []
+	                                            {
+	                                                "19.cfs", "19.nocfs", "20.cfs", "20.nocfs", "21.cfs", "21.nocfs", "22.cfs", 
+                                                    "22.nocfs", "23.cfs", "23.nocfs", "24.cfs", "24.nocfs", "29.cfs",
+	                                                "29.nocfs"
+	                                            };
+
+        private void assertCompressedFields29(Directory dir, bool shouldStillBeCompressed)
+        {
+            int count = 0;
+            // FieldSelectorResult.SIZE returns 2*number_of_chars for String fields:
+            int TEXT_PLAIN_LENGTH = TEXT_TO_COMPRESS.Length*2;
+            int BINARY_PLAIN_LENGTH = BINARY_TO_COMPRESS.Length;
+
+            IndexReader reader = IndexReader.Open(dir, true);
+            try
+            {
+                // look into sub readers and check if raw merge is on/off
+                var readers = new System.Collections.Generic.List<IndexReader>();
+                ReaderUtil.GatherSubReaders(readers, reader);
+                foreach (IndexReader ir in readers)
+                {
+                    FieldsReader fr = ((SegmentReader) ir).GetFieldsReader();
+                    Assert.IsTrue(shouldStillBeCompressed != fr.CanReadRawDocs(),
+                                  "for a 2.9 index, FieldsReader.CanReadRawDocs() must be false, and the other way round for a trunk index");
+                }
+
+                // test that decompression works correctly
+                for (int i = 0; i < reader.MaxDoc(); i++)
+                {
+                    if (!reader.IsDeleted(i))
+                    {
+                        Document d = reader.Document(i);
+                        if (d.Get("content3") != null) continue;
+                        count++;
+                        Fieldable compressed = d.GetFieldable("compressed");
+                        if (int.Parse(d.Get("id"))%2 == 0)
+                        {
+                            Assert.IsFalse(compressed.IsBinary());
+                            Assert.AreEqual(TEXT_TO_COMPRESS, compressed.StringValue(),
+                                            "incorrectly decompressed string");
+                        }
+                        else
+                        {
+                            Assert.IsTrue(compressed.IsBinary());
+                            Assert.IsTrue(BINARY_TO_COMPRESS.SequenceEqual(compressed.GetBinaryValue()),
+                                          "incorrectly decompressed binary");
+                        }
+                    }
+                }
+
+                // check if field was decompressed after optimize
+                for (int i = 0; i < reader.MaxDoc(); i++)
+                {
+                    if (!reader.IsDeleted(i))
+                    {
+                        Document d = reader.Document(i, new AnonymousFieldSelector());
+                        if (d.Get("content3") != null) continue;
+                        count++;
+                        // read the stored size from the first four bytes of the binary value;
+                        // Java writes it big-endian, so reverse the bytes before BitConverter.ToInt32:
+                        byte[] encodedSize = d.GetFieldable("compressed").GetBinaryValue().Take(4).Reverse().ToArray();
+                        int actualSize = BitConverter.ToInt32(encodedSize, 0);
+                        int compressedSize = int.Parse(d.Get("compressedSize"));
+                        bool binary = int.Parse(d.Get("id"))%2 > 0;
+                        int shouldSize = shouldStillBeCompressed
+                                             ? compressedSize
+                                             : (binary ? BINARY_PLAIN_LENGTH : TEXT_PLAIN_LENGTH);
+                        Assert.AreEqual(shouldSize, actualSize, "size incorrect");
+                        if (!shouldStillBeCompressed)
+                        {
+                            Assert.IsFalse(compressedSize == actualSize,
+                                           "uncompressed field should have a different size than the one recorded in the index");
+                        }
+                    }
+                }
+                Assert.AreEqual(34*2, count, "correct number of tests");
+            }
+            finally
+            {
+                reader.Dispose();
+            }
+        }
+
+	    [Test]
 		public virtual void  TestOptimizeOldIndex()
-		{
+	    {
+	        int hasTested29 = 0;
 			for (int i = 0; i < oldNames.Length; i++)
 			{
 				System.String dirName = Paths.CombinePath(Paths.ProjectRootDirectory, "test/core/index/index." + oldNames[i]);
 				Unzip(dirName, oldNames[i]);
 				System.String fullPath = FullDir(oldNames[i]);
-				Directory dir = FSDirectory.Open(new System.IO.FileInfo(fullPath));
+				Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(fullPath));
+
+                if (oldNames[i].StartsWith("29."))
+                {
+                    assertCompressedFields29(dir, true);
+                    hasTested29++;
+                }
+
 				IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 				w.Optimize();
 				w.Close();
 				
 				_TestUtil.CheckIndex(dir);
+
+                if (oldNames[i].StartsWith("29."))
+                {
+                    assertCompressedFields29(dir, false);
+                    hasTested29++;
+                }
+
 				dir.Close();
 				RmDir(oldNames[i]);
 			}
+            Assert.AreEqual(4, hasTested29, "test for compressed field should have run 4 times");
 		}
 		
 		[Test]
@@ -159,11 +268,7 @@ namespace Lucene.Net.Index
 			{
                 System.String dirName = Paths.CombinePath(Paths.ProjectRootDirectory, "test/core/index/index." + oldNames[i]);
 				Unzip(dirName, oldNames[i]);
-				ChangeIndexNoAdds(oldNames[i], true);
-				RmDir(oldNames[i]);
-				
-				Unzip(dirName, oldNames[i]);
-				ChangeIndexNoAdds(oldNames[i], false);
+				ChangeIndexNoAdds(oldNames[i]);
 				RmDir(oldNames[i]);
 			}
 		}
@@ -175,11 +280,7 @@ namespace Lucene.Net.Index
 			{
                 System.String dirName = Paths.CombinePath(Paths.ProjectRootDirectory, "test/core/index/index." + oldNames[i]);
 				Unzip(dirName, oldNames[i]);
-				ChangeIndexWithAdds(oldNames[i], true);
-				RmDir(oldNames[i]);
-				
-				Unzip(dirName, oldNames[i]);
-				ChangeIndexWithAdds(oldNames[i], false);
+				ChangeIndexWithAdds(oldNames[i]);
 				RmDir(oldNames[i]);
 			}
 		}
@@ -202,8 +303,8 @@ namespace Lucene.Net.Index
 			
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
-			IndexSearcher searcher = new IndexSearcher(dir);
+			Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
+			IndexSearcher searcher = new IndexSearcher(dir, true);
 			IndexReader reader = searcher.GetIndexReader();
 			
 			_TestUtil.CheckIndex(dir);
@@ -213,14 +314,14 @@ namespace Lucene.Net.Index
 				if (!reader.IsDeleted(i))
 				{
 					Document d = reader.Document(i);
-					System.Collections.IList fields = d.GetFields();
+					var fields = d.GetFields();
 					if (!oldName.StartsWith("19.") && !oldName.StartsWith("20.") && !oldName.StartsWith("21.") && !oldName.StartsWith("22."))
 					{
-						
 						if (d.GetField("content3") == null)
 						{
-							Assert.AreEqual(5, fields.Count);
-							Field f = (Field) d.GetField("id");
+						    int numFields = oldName.StartsWith("29.") ? 7 : 5;
+							Assert.AreEqual(numFields, fields.Count);
+							Field f = d.GetField("id");
 							Assert.AreEqual("" + i, f.StringValue());
 							
 							f = (Field) d.GetField("utf8");
@@ -275,15 +376,15 @@ namespace Lucene.Net.Index
 		
 		/* Open pre-lockless index, add docs, do a delete &
 		* setNorm, and search */
-		public virtual void  ChangeIndexWithAdds(System.String dirName, bool autoCommit)
+		public virtual void  ChangeIndexWithAdds(System.String dirName)
 		{
 			System.String origDirName = dirName;
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
+			Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
 			
 			// open writer
-			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
 			
 			// add 10 docs
 			for (int i = 0; i < 10; i++)
@@ -301,11 +402,11 @@ namespace Lucene.Net.Index
 			{
 				expected = 46;
 			}
-			Assert.AreEqual(expected, writer.DocCount(), "wrong doc count");
+			Assert.AreEqual(expected, writer.MaxDoc(), "wrong doc count");
 			writer.Close();
 			
 			// make sure searching sees right # hits
-			IndexSearcher searcher = new IndexSearcher(dir);
+			IndexSearcher searcher = new IndexSearcher(dir, true);
 			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Document d = searcher.Doc(hits[0].doc);
 			Assert.AreEqual("21", d.Get("id"), "wrong first document");
@@ -314,7 +415,7 @@ namespace Lucene.Net.Index
 			
 			// make sure we can do delete & setNorm against this
 			// pre-lockless segment:
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			Term searchTerm = new Term("id", "6");
 			int delCount = reader.DeleteDocuments(searchTerm);
 			Assert.AreEqual(1, delCount, "wrong delete count");
@@ -322,7 +423,7 @@ namespace Lucene.Net.Index
 			reader.Close();
 			
 			// make sure they "took":
-			searcher = new IndexSearcher(dir);
+			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(43, hits.Length, "wrong number of hits");
 			d = searcher.Doc(hits[0].doc);
@@ -331,11 +432,11 @@ namespace Lucene.Net.Index
 			searcher.Close();
 			
 			// optimize
-			writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+			writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.Optimize();
 			writer.Close();
 			
-			searcher = new IndexSearcher(dir);
+			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(43, hits.Length, "wrong number of hits");
 			d = searcher.Doc(hits[0].doc);
@@ -348,15 +449,14 @@ namespace Lucene.Net.Index
 		
 		/* Open pre-lockless index, add docs, do a delete &
 		* setNorm, and search */
-		public virtual void  ChangeIndexNoAdds(System.String dirName, bool autoCommit)
+		public virtual void  ChangeIndexNoAdds(System.String dirName)
 		{
-			
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
+			Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
 			
 			// make sure searching sees right # hits
-			IndexSearcher searcher = new IndexSearcher(dir);
+			IndexSearcher searcher = new IndexSearcher(dir, true);
 			ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(34, hits.Length, "wrong number of hits");
 			Document d = searcher.Doc(hits[0].doc);
@@ -365,7 +465,7 @@ namespace Lucene.Net.Index
 			
 			// make sure we can do a delete & setNorm against this
 			// pre-lockless segment:
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			Term searchTerm = new Term("id", "6");
 			int delCount = reader.DeleteDocuments(searchTerm);
 			Assert.AreEqual(1, delCount, "wrong delete count");
@@ -373,7 +473,7 @@ namespace Lucene.Net.Index
 			reader.Close();
 			
 			// make sure they "took":
-			searcher = new IndexSearcher(dir);
+			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(33, hits.Length, "wrong number of hits");
 			d = searcher.Doc(hits[0].doc);
@@ -382,11 +482,11 @@ namespace Lucene.Net.Index
 			searcher.Close();
 			
 			// optimize
-			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.Optimize();
 			writer.Close();
 			
-			searcher = new IndexSearcher(dir);
+			searcher = new IndexSearcher(dir, true);
 			hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
 			Assert.AreEqual(33, hits.Length, "wrong number of hits");
 			d = searcher.Doc(hits[0].doc);
@@ -404,7 +504,7 @@ namespace Lucene.Net.Index
 			
 			dirName = FullDir(dirName);
 			
-			Directory dir = FSDirectory.Open(new System.IO.FileInfo(dirName));
+			Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(dirName));
 			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetUseCompoundFile(doCFS);
 			writer.SetMaxBufferedDocs(10);
@@ -413,7 +513,7 @@ namespace Lucene.Net.Index
 			{
 				AddDoc(writer, i);
 			}
-			Assert.AreEqual(35, writer.DocCount(), "wrong doc count");
+			Assert.AreEqual(35, writer.MaxDoc(), "wrong doc count");
 			writer.Close();
 			
 			// open fresh writer so we get no prx file in the added segment
@@ -424,7 +524,7 @@ namespace Lucene.Net.Index
 			writer.Close();
 			
 			// Delete one doc so we get a .del file:
-			IndexReader reader = IndexReader.Open(dir);
+		    IndexReader reader = IndexReader.Open(dir, false);
 			Term searchTerm = new Term("id", "7");
 			int delCount = reader.DeleteDocuments(searchTerm);
 			Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
@@ -435,84 +535,81 @@ namespace Lucene.Net.Index
 		}
 		
 		/* Verifies that the expected file names were produced */
-		
-		[Test]
-		public virtual void  TestExactFileNames()
-		{
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				
-				System.String outputDir = "lucene.backwardscompat0.index";
-				RmDir(outputDir);
-				
-				try
-				{
-					Directory dir = FSDirectory.Open(new System.IO.FileInfo(FullDir(outputDir)));
-					
-					bool autoCommit = 0 == pass;
-					
-					IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
-					writer.SetRAMBufferSizeMB(16.0);
-					for (int i = 0; i < 35; i++)
-					{
-						AddDoc(writer, i);
-					}
-					Assert.AreEqual(35, writer.DocCount(), "wrong doc count");
-					writer.Close();
-					
-					// Delete one doc so we get a .del file:
-					IndexReader reader = IndexReader.Open(dir);
-					Term searchTerm = new Term("id", "7");
-					int delCount = reader.DeleteDocuments(searchTerm);
-					Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
-					
-					// Set one norm so we get a .s0 file:
-					reader.SetNorm(21, "content", (float) 1.5);
-					reader.Close();
-					
-					// The numbering of fields can vary depending on which
-					// JRE is in use.  On some JREs we see content bound to
-					// field 0; on others, field 1.  So, here we have to
-					// figure out which field number corresponds to
-					// "content", and then set our expected file names below
-					// accordingly:
-					CompoundFileReader cfsReader = new CompoundFileReader(dir, "_0.cfs");
-					FieldInfos fieldInfos = new FieldInfos(cfsReader, "_0.fnm");
-					int contentFieldIndex = - 1;
-					for (int i = 0; i < fieldInfos.Size(); i++)
-					{
-						FieldInfo fi = fieldInfos.FieldInfo(i);
-						if (fi.name_ForNUnit.Equals("content"))
-						{
-							contentFieldIndex = i;
-							break;
-						}
-					}
-					cfsReader.Close();
-					Assert.IsTrue(contentFieldIndex != - 1, "could not locate the 'content' field number in the _2.cfs segment");
-					
-					// Now verify file names:
-					System.String[] expected;
-					expected = new System.String[]{"_0.cfs", "_0_1.del", "_0_1.s" + contentFieldIndex, "segments_3", "segments.gen"};
-					
-					System.String[] actual = dir.ListAll();
-					System.Array.Sort(expected);
-					System.Array.Sort(actual);
-					if (!SupportClass.CollectionsHelper.Equals(expected, actual))
-					{
-						Assert.Fail("incorrect filenames in index: expected:\n    " + AsString(expected) + "\n  actual:\n    " + AsString(actual));
-					}
-					dir.Close();
-				}
-				finally
-				{
-					RmDir(outputDir);
-				}
-			}
-		}
-		
-		private System.String AsString(System.String[] l)
+
+        [Test]
+        public virtual void TestExactFileNames()
+        {
+            System.String outputDir = "lucene.backwardscompat0.index";
+            RmDir(outputDir);
+
+            try
+            {
+                Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(FullDir(outputDir)));
+
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
+                                                     IndexWriter.MaxFieldLength.UNLIMITED);
+                writer.SetRAMBufferSizeMB(16.0);
+                for (int i = 0; i < 35; i++)
+                {
+                    AddDoc(writer, i);
+                }
+                Assert.AreEqual(35, writer.MaxDoc(), "wrong doc count");
+                writer.Close();
+
+                // Delete one doc so we get a .del file:
+                IndexReader reader = IndexReader.Open(dir, false);
+                Term searchTerm = new Term("id", "7");
+                int delCount = reader.DeleteDocuments(searchTerm);
+                Assert.AreEqual(1, delCount, "didn't delete the right number of documents");
+
+                // Set one norm so we get a .s0 file:
+                reader.SetNorm(21, "content", (float) 1.5);
+                reader.Close();
+
+                // The numbering of fields can vary depending on which
+                // JRE is in use.  On some JREs we see content bound to
+                // field 0; on others, field 1.  So, here we have to
+                // figure out which field number corresponds to
+                // "content", and then set our expected file names below
+                // accordingly:
+                CompoundFileReader cfsReader = new CompoundFileReader(dir, "_0.cfs");
+                FieldInfos fieldInfos = new FieldInfos(cfsReader, "_0.fnm");
+                int contentFieldIndex = -1;
+                for (int i = 0; i < fieldInfos.Size(); i++)
+                {
+                    FieldInfo fi = fieldInfos.FieldInfo(i);
+                    if (fi.name_ForNUnit.Equals("content"))
+                    {
+                        contentFieldIndex = i;
+                        break;
+                    }
+                }
+                cfsReader.Close();
+                Assert.IsTrue(contentFieldIndex != -1,
+                              "could not locate the 'content' field number in the _2.cfs segment");
+
+                // Now verify file names:
+                System.String[] expected;
+                expected = new System.String[]
+                               {"_0.cfs", "_0_1.del", "_0_1.s" + contentFieldIndex, "segments_3", "segments.gen"};
+
+                System.String[] actual = dir.ListAll();
+                System.Array.Sort(expected);
+                System.Array.Sort(actual);
+                if (!CollectionsHelper.Equals(expected, actual))
+                {
+                    Assert.Fail("incorrect filenames in index: expected:\n    " + AsString(expected) +
+                                "\n  actual:\n    " + AsString(actual));
+                }
+                dir.Close();
+            }
+            finally
+            {
+                RmDir(outputDir);
+            }
+        }
+
+	    private System.String AsString(System.String[] l)
 		{
 			System.String s = "";
 			for (int i = 0; i < l.Length; i++)
@@ -534,7 +631,8 @@ namespace Lucene.Net.Index
 			doc.Add(new Field("autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
 			doc.Add(new Field("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
 			doc.Add(new Field("content2", "here is more content with aaa aaa aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-			doc.Add(new Field("fie\u2C77ld", "field with non-ascii name", Field.Store.YES, Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+			doc.Add(new Field("fie\u2C77ld", "field with non-ascii name", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+            /* This was used in 2.9 to generate an index with compressed field:
 			if (id % 2 == 0)
 			{
 				doc.Add(new Field("compressed", TEXT_TO_COMPRESS, Field.Store.COMPRESS, Field.Index.NOT_ANALYZED));
@@ -544,7 +642,10 @@ namespace Lucene.Net.Index
 			{
 				doc.Add(new Field("compressed", BINARY_TO_COMPRESS, Field.Store.COMPRESS));
 				doc.Add(new Field("compressedSize", System.Convert.ToString(BINARY_COMPRESSED_LENGTH), Field.Store.YES, Field.Index.NOT_ANALYZED));
-			}
+			}*/
+            // Add numeric fields, to test if flex preserves encoding
+		    doc.Add(new NumericField("trieInt", 4).SetIntValue(id));
+            doc.Add(new NumericField("trieLong", 4).SetLongValue(id));
 			writer.AddDocument(doc);
 		}
 		
@@ -552,10 +653,10 @@ namespace Lucene.Net.Index
 		{
 			Document doc = new Document();
 			Field f = new Field("content3", "aaa", Field.Store.YES, Field.Index.ANALYZED);
-			f.SetOmitTf(true);
+			f.SetOmitTermFreqAndPositions(true);
 			doc.Add(f);
 			f = new Field("content4", "aaa", Field.Store.YES, Field.Index.NO);
-			f.SetOmitTf(true);
+			f.SetOmitTermFreqAndPositions(true);
 			doc.Add(f);
 			writer.AddDocument(doc);
 		}
@@ -570,7 +671,7 @@ namespace Lucene.Net.Index
 				tmpBool = System.IO.Directory.Exists(fileDir.FullName);
 			if (tmpBool)
 			{
-				System.IO.FileInfo[] files = SupportClass.FileSupport.GetFiles(fileDir);
+				System.IO.FileInfo[] files = FileSupport.GetFiles(fileDir);
 				if (files != null)
 				{
 					for (int i = 0; i < files.Length; i++)
@@ -610,7 +711,7 @@ namespace Lucene.Net.Index
 		
 		public static System.String FullDir(System.String dirName)
 		{
-			return new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), dirName)).FullName;
+			return new System.IO.FileInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), dirName)).FullName;
 		}
 		
 		internal const System.String TEXT_TO_COMPRESS = "this is a compressed field and should appear in 3.0 as an uncompressed field after merge";
@@ -618,22 +719,23 @@ namespace Lucene.Net.Index
 		// which are internally handled as binary;
 		// do it in the same way like FieldsWriter, do not use
 		// CompressionTools.compressString() for compressed fields:
-		internal static int TEXT_COMPRESSED_LENGTH;
-		
 		internal static readonly byte[] BINARY_TO_COMPRESS = new byte[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20};
-		internal static readonly int BINARY_COMPRESSED_LENGTH = CompressionTools.Compress(BINARY_TO_COMPRESS).Length;
-		static TestBackwardsCompatibility()
-		{
-			{
-				try
-				{
-					TEXT_COMPRESSED_LENGTH = CompressionTools.Compress(System.Text.Encoding.GetEncoding("UTF-8").GetBytes(TEXT_TO_COMPRESS)).Length;
-				}
-				catch (System.Exception e)
-				{
-					throw new System.SystemException();
-				}
-			}
-		}
+
+        /* This was used in 2.9 to generate an index with compressed field
+        internal static int TEXT_COMPRESSED_LENGTH;
+        internal static readonly int BINARY_COMPRESSED_LENGTH = CompressionTools.Compress(BINARY_TO_COMPRESS).Length;
+        static TestBackwardsCompatibility()
+        {
+            {
+                try
+                {
+                    TEXT_COMPRESSED_LENGTH = CompressionTools.Compress(System.Text.Encoding.GetEncoding("UTF-8").GetBytes(TEXT_TO_COMPRESS)).Length;
+                }
+                catch (System.Exception e)
+                {
+                    throw new System.SystemException();
+                }
+            }
+        }*/
 	}
 }
\ No newline at end of file
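
The manual big-endian decode in assertCompressedFields29 above, isolated as a
minimal standalone sketch. It assumes a little-endian CLR, where
BitConverter.ToInt32 expects little-endian input; the buffer contents and
names are illustrative, not part of the commit.

    using System;
    using System.Linq;

    static class BigEndianInt32Demo
    {
        // Java writes the stored length big-endian, so reverse the first
        // four bytes before handing them to BitConverter:
        static int ReadBigEndianInt32(byte[] buffer)
        {
            byte[] encodedSize = buffer.Take(4).Reverse().ToArray();
            return BitConverter.ToInt32(encodedSize, 0);
        }

        static void Main()
        {
            byte[] stored = { 0x00, 0x00, 0x01, 0x2C }; // 300 in big-endian
            Console.WriteLine(ReadBigEndianInt32(stored)); // prints 300
        }
    }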

Modified: incubator/lucene.net/trunk/test/core/Index/TestByteSlices.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestByteSlices.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestByteSlices.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestByteSlices.cs Tue Feb 28 22:43:08 2012
@@ -64,13 +64,13 @@ namespace Lucene.Net.Index
                 }
             }
 
-            public override void RecycleByteBlocks(System.Collections.ArrayList blocks)
+            public override void RecycleByteBlocks(System.Collections.Generic.IList<byte[]> blocks)
             {
                 lock (this)
                 {
                     int size = blocks.Count;
                     for (int i = 0; i < size; i++)
-                        freeByteBlocks.Add((byte[])blocks[i]);
+                        freeByteBlocks.Add(blocks[i]);
                 }
             }
         }

Modified: incubator/lucene.net/trunk/test/core/Index/TestCheckIndex.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestCheckIndex.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestCheckIndex.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestCheckIndex.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using NUnit.Framework;
 
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -46,7 +46,7 @@ namespace Lucene.Net.Index
 				writer.AddDocument(doc);
 			}
 			writer.Close();
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			reader.DeleteDocument(5);
 			reader.Close();
 			
@@ -93,7 +93,7 @@ namespace Lucene.Net.Index
 			Assert.AreEqual(18, seg.termVectorStatus.totVectors);
 			
 			Assert.IsTrue(seg.diagnostics.Count > 0);
-			System.Collections.IList onlySegments = new System.Collections.ArrayList();
+			List<string> onlySegments = new List<string>();
 			onlySegments.Add("_0");
 			
 			Assert.IsTrue(checker.CheckIndex_Renamed_Method(onlySegments).clean == true);
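
A hedged usage sketch of the per-segment check exercised above. Only
CheckIndex_Renamed_Method(onlySegments) and the .clean flag are taken from
the diff; constructing the checker directly over the directory is an
assumption, and the method and variable names below are illustrative.

    using System.Collections.Generic;
    using Lucene.Net.Index;
    using NUnit.Framework;

    static class CheckSegmentDemo
    {
        // Verify only segment _0 instead of the whole index.
        static void CheckFirstSegment(Lucene.Net.Store.Directory dir)
        {
            CheckIndex checker = new CheckIndex(dir); // assumed constructor
            List<string> onlySegments = new List<string> { "_0" };
            var status = checker.CheckIndex_Renamed_Method(onlySegments);
            Assert.IsTrue(status.clean);
        }
    }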

Modified: incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestCompoundFile.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using Directory = Lucene.Net.Store.Directory;
@@ -29,11 +29,6 @@ using _TestUtil = Lucene.Net.Util._TestU
 
 namespace Lucene.Net.Index
 {
-	
-	
-	/// <summary> </summary>
-	/// <version>  $Id: TestCompoundFile.java 780770 2009-06-01 18:34:10Z uschindler $
-	/// </version>
 	[TestFixture]
 	public class TestCompoundFile:LuceneTestCase
 	{
@@ -63,7 +58,7 @@ namespace Lucene.Net.Index
 		public override void  SetUp()
 		{
 			base.SetUp();
-			System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "testIndex"));
+			System.IO.DirectoryInfo file = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "testIndex"));
 			_TestUtil.RmDir(file);
 			// use a simple FSDir here, to be sure to have SimpleFSInputs
 			dir = new SimpleFSDirectory(file, null);
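
The FileInfo-to-DirectoryInfo migration that recurs throughout this commit,
reduced to a minimal sketch; the helper name and path are illustrative. An
index occupies a whole directory, so the newer FSDirectory.Open overload
takes a DirectoryInfo rather than a FileInfo.

    using System.IO;

    static class OpenIndexDemo
    {
        static Lucene.Net.Store.Directory OpenIndex(string root)
        {
            DirectoryInfo indexDir = new DirectoryInfo(Path.Combine(root, "testIndex"));
            // Fully qualified to avoid clashing with System.IO.Directory:
            return Lucene.Net.Store.FSDirectory.Open(indexDir);
        }
    }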

Modified: incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestConcurrentMergeScheduler.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Threading;
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -38,11 +38,13 @@ namespace Lucene.Net.Index
 		
 		private class FailOnlyOnFlush:MockRAMDirectory.Failure
 		{
-			internal bool doFail = false;
+			internal bool doFail;
+		    internal bool hitExc;
 			
 			public virtual void  SetDoFail()
 			{
-				this.doFail = true;
+                this.doFail = true;
+                hitExc = false;
 			}
 			public virtual void  ClearDoFail()
 			{
@@ -51,7 +53,7 @@ namespace Lucene.Net.Index
 			
 			public override void  Eval(MockRAMDirectory dir)
 			{
-				if (doFail)
+				if (doFail)// && Thread.CurrentThread.Name.Equals("main")) // TODO: This does not work -cc
 				{
 					System.Diagnostics.StackTrace trace = new System.Diagnostics.StackTrace();
 					for (int i = 0; i < trace.FrameCount; i++)
@@ -59,6 +61,7 @@ namespace Lucene.Net.Index
 						System.Diagnostics.StackFrame sf = trace.GetFrame(i);
 						if ("DoFlush".Equals(sf.GetMethod().Name))
 						{
+						    hitExc = true;
 							//new RuntimeException().printStackTrace(System.out);
 							throw new System.IO.IOException("now failing during flush");
 						}
@@ -76,14 +79,16 @@ namespace Lucene.Net.Index
 			MockRAMDirectory directory = new MockRAMDirectory();
 			FailOnlyOnFlush failure = new FailOnlyOnFlush();
 			directory.FailOn(failure);
-			
-			IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
+
+            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
 			writer.SetMergeScheduler(cms);
 			writer.SetMaxBufferedDocs(2);
 			Document doc = new Document();
 			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
 			doc.Add(idField);
+		    int extraCount = 0;
+
 			for (int i = 0; i < 10; i++)
 			{
 				for (int j = 0; j < 20; j++)
@@ -91,24 +96,32 @@ namespace Lucene.Net.Index
 					idField.SetValue(System.Convert.ToString(i * 20 + j));
 					writer.AddDocument(doc);
 				}
-				
-				writer.AddDocument(doc);
-				
-				failure.SetDoFail();
-				try
-				{
-					writer.Flush();
-					Assert.Fail("failed to hit IOException");
-				}
-				catch (System.IO.IOException ioe)
-				{
-					failure.ClearDoFail();
-				}
+
+                while (true)
+                {
+                    // must cycle here because sometimes the merge flushes
+                    // the doc we just added and so there's nothing to
+                    // flush, and we don't hit the exception
+                    writer.AddDocument(doc);
+                    failure.SetDoFail();
+                    try
+                    {
+                        writer.Flush(true, false, true);
+                        if(failure.hitExc)
+                            Assert.Fail("failed to hit IOException");
+                        extraCount++;
+                    }
+                    catch (System.IO.IOException ioe)
+                    {
+                        failure.ClearDoFail();
+                        break;
+                    }
+                }
 			}
 			
 			writer.Close();
-			IndexReader reader = IndexReader.Open(directory);
-			Assert.AreEqual(200, reader.NumDocs());
+			IndexReader reader = IndexReader.Open(directory, true);
+			Assert.AreEqual(200+extraCount, reader.NumDocs());
 			reader.Close();
 			directory.Close();
 		}
@@ -120,8 +133,8 @@ namespace Lucene.Net.Index
 		{
 			
 			RAMDirectory directory = new MockRAMDirectory();
-			
-			IndexWriter writer = new IndexWriter(directory, true, ANALYZER, true);
+
+            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 			ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
 			writer.SetMergeScheduler(cms);
 			
@@ -151,108 +164,94 @@ namespace Lucene.Net.Index
 					delID += 10;
 				}
 				
-				writer.Flush();
+				writer.Commit();
 			}
 			
 			writer.Close();
-			IndexReader reader = IndexReader.Open(directory);
+			IndexReader reader = IndexReader.Open(directory, true);
 			// Verify that we did not lose any deletes...
 			Assert.AreEqual(450, reader.NumDocs());
 			reader.Close();
 			directory.Close();
 		}
-		
-		[Test]
-		public virtual void  TestNoExtraFiles()
-		{
-			
-			RAMDirectory directory = new MockRAMDirectory();
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				
-				bool autoCommit = pass == 0;
-				IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
-				
-				for (int iter = 0; iter < 7; iter++)
-				{
-					ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
-					writer.SetMergeScheduler(cms);
-					writer.SetMaxBufferedDocs(2);
-					
-					for (int j = 0; j < 21; j++)
-					{
-						Document doc = new Document();
-						doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.ANALYZED));
-						writer.AddDocument(doc);
-					}
-					
-					writer.Close();
-					TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles autoCommit=" + autoCommit);
-					
-					// Reopen
-					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
-				}
-				
-				writer.Close();
-			}
-			
-			directory.Close();
-		}
-		
-		[Test]
-		public virtual void  TestNoWaitClose()
-		{
-			RAMDirectory directory = new MockRAMDirectory();
-			
-			Document doc = new Document();
-			Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
-			doc.Add(idField);
-			
-			for (int pass = 0; pass < 2; pass++)
-			{
-				bool autoCommit = pass == 0;
-				IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
-				
-				for (int iter = 0; iter < 10; iter++)
-				{
-					ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
-					writer.SetMergeScheduler(cms);
-					writer.SetMaxBufferedDocs(2);
-					writer.SetMergeFactor(100);
-					
-					for (int j = 0; j < 201; j++)
-					{
-						idField.SetValue(System.Convert.ToString(iter * 201 + j));
-						writer.AddDocument(doc);
-					}
-					
-					int delID = iter * 201;
-					for (int j = 0; j < 20; j++)
-					{
-						writer.DeleteDocuments(new Term("id", System.Convert.ToString(delID)));
-						delID += 5;
-					}
-					
-					// Force a bunch of merge threads to kick off so we
-					// stress out aborting them on close:
-					writer.SetMergeFactor(3);
-					writer.AddDocument(doc);
-					writer.Flush();
-					
-					writer.Close(false);
-					
-					IndexReader reader = IndexReader.Open(directory);
-					Assert.AreEqual((1 + iter) * 182, reader.NumDocs());
-					reader.Close();
-					
-					// Reopen
-					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
-				}
-				writer.Close();
-			}
-			
-			directory.Close();
-		}
+
+        [Test]
+        public virtual void TestNoExtraFiles()
+        {
+            RAMDirectory directory = new MockRAMDirectory();
+            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+
+            for (int iter = 0; iter < 7; iter++)
+            {
+                ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+                writer.SetMergeScheduler(cms);
+                writer.SetMaxBufferedDocs(2);
+
+                for (int j = 0; j < 21; j++)
+                {
+                    Document doc = new Document();
+                    doc.Add(new Field("content", "a b c", Field.Store.NO, Field.Index.ANALYZED));
+                    writer.AddDocument(doc);
+                }
+
+                writer.Close();
+                TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");
+                // Reopen
+                writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+            }
+            writer.Close();
+            directory.Close();
+        }
+
+        [Test]
+        public virtual void TestNoWaitClose()
+        {
+            RAMDirectory directory = new MockRAMDirectory();
+
+            Document doc = new Document();
+            Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
+            doc.Add(idField);
+
+            IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+
+            for (int iter = 0; iter < 10; iter++)
+            {
+                ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+                writer.SetMergeScheduler(cms);
+                writer.SetMaxBufferedDocs(2);
+                writer.SetMergeFactor(100);
+
+                for (int j = 0; j < 201; j++)
+                {
+                    idField.SetValue(System.Convert.ToString(iter*201 + j));
+                    writer.AddDocument(doc);
+                }
+
+                int delID = iter*201;
+                for (int j = 0; j < 20; j++)
+                {
+                    writer.DeleteDocuments(new Term("id", delID.ToString()));
+                    delID += 5;
+                }
+
+                // Force a bunch of merge threads to kick off so we
+                // stress out aborting them on close:
+                writer.SetMergeFactor(3);
+                writer.AddDocument(doc);
+                writer.Commit();
+
+                writer.Close(false);
+
+                IndexReader reader = IndexReader.Open(directory, true);
+                Assert.AreEqual((1 + iter)*182, reader.NumDocs());
+                reader.Close();
+
+                // Reopen
+                writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+            }
+            writer.Close();
+
+            directory.Close();
+        }
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/test/core/Index/TestCrash.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestCrash.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestCrash.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestCrash.cs Tue Feb 28 22:43:08 2012
@@ -41,8 +41,8 @@ namespace Lucene.Net.Index
 		private IndexWriter InitIndex(MockRAMDirectory dir)
 		{
 			dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
-			
-			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
+
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
 			//writer.setMaxBufferedDocs(2);
 			writer.SetMaxBufferedDocs(10);
 			((ConcurrentMergeScheduler) writer.GetMergeScheduler()).SetSuppressExceptions();
@@ -71,7 +71,7 @@ namespace Lucene.Net.Index
 			IndexWriter writer = InitIndex();
 			MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
 			Crash(writer);
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, true);
 			Assert.IsTrue(reader.NumDocs() < 157);
 		}
 		
@@ -85,7 +85,7 @@ namespace Lucene.Net.Index
 			writer = InitIndex(dir);
 			writer.Close();
 			
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			Assert.IsTrue(reader.NumDocs() < 314);
 		}
 		
@@ -96,7 +96,7 @@ namespace Lucene.Net.Index
 			MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
 			writer.Close();
 			writer = InitIndex(dir);
-			Assert.AreEqual(314, writer.DocCount());
+			Assert.AreEqual(314, writer.MaxDoc());
 			Crash(writer);
 			
 			/*
@@ -108,7 +108,7 @@ namespace Lucene.Net.Index
 			dir.fileLength(l[i]) + " bytes");
 			*/
 			
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			Assert.IsTrue(reader.NumDocs() >= 157);
 		}
 		
@@ -129,7 +129,7 @@ namespace Lucene.Net.Index
 			System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
 			*/
 			
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			Assert.AreEqual(157, reader.NumDocs());
 		}
 		
@@ -150,7 +150,7 @@ namespace Lucene.Net.Index
 			for(int i=0;i<l.length;i++)
 			System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
 			*/
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			Assert.AreEqual(157, reader.NumDocs());
 		}
 		
@@ -162,7 +162,7 @@ namespace Lucene.Net.Index
 			MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
 			
 			writer.Close(false);
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			reader.DeleteDocument(3);
 			
 			dir.Crash();
@@ -173,7 +173,7 @@ namespace Lucene.Net.Index
 			for(int i=0;i<l.length;i++)
 			System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
 			*/
-			reader = IndexReader.Open(dir);
+			reader = IndexReader.Open(dir, false);
 			Assert.AreEqual(157, reader.NumDocs());
 		}
 		
@@ -185,7 +185,7 @@ namespace Lucene.Net.Index
 			MockRAMDirectory dir = (MockRAMDirectory) writer.GetDirectory();
 			
 			writer.Close(false);
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, false);
 			reader.DeleteDocument(3);
 			reader.Close();
 			
@@ -197,7 +197,7 @@ namespace Lucene.Net.Index
 			for(int i=0;i<l.length;i++)
 			System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
 			*/
-			reader = IndexReader.Open(dir);
+			reader = IndexReader.Open(dir, false);
 			Assert.AreEqual(156, reader.NumDocs());
 		}
 	}
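
The writer/reader API migration applied across all of these test files,
condensed into one hedged sketch; the directory and analyzer choices are
illustrative. The obsolete autoCommit constructors are gone, IndexWriter
takes an explicit MaxFieldLength, commits are made explicitly via Commit(),
and IndexReader.Open takes a read-only flag.

    using Lucene.Net.Analysis;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    static class WriterReaderMigrationDemo
    {
        static void Demo()
        {
            RAMDirectory dir = new RAMDirectory();

            // Old: new IndexWriter(dir, autoCommit, analyzer, create)
            IndexWriter writer = new IndexWriter(
                dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
            writer.Commit();  // explicit commit replaces the old autoCommit behavior
            writer.Close();

            // Old: IndexReader.Open(dir)
            IndexReader readOnly = IndexReader.Open(dir, true);   // read-only
            IndexReader writable = IndexReader.Open(dir, false);  // allows Delete/SetNorm
            readOnly.Close();
            writable.Close();
        }
    }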

Modified: incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDeletionPolicy.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using NUnit.Framework;
 
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -39,18 +39,18 @@ namespace Lucene.Net.Index
 	against it, and add documents to it.*/
 	
 	[TestFixture]
-	public class TestDeletionPolicy:LuceneTestCase
+	public class TestDeletionPolicy : LuceneTestCase
 	{
-		private void  VerifyCommitOrder(System.Collections.IList commits)
+		private void  VerifyCommitOrder<T>(IList<T> commits) where T : IndexCommit
 		{
-			IndexCommit firstCommit = ((IndexCommit) commits[0]);
+			IndexCommit firstCommit = commits[0];
 			long last = SegmentInfos.GenerationFromSegmentsFileName(firstCommit.GetSegmentsFileName());
 			Assert.AreEqual(last, firstCommit.GetGeneration());
 			long lastVersion = firstCommit.GetVersion();
 			long lastTimestamp = firstCommit.GetTimestamp();
 			for (int i = 1; i < commits.Count; i++)
 			{
-				IndexCommit commit = ((IndexCommit) commits[i]);
+				IndexCommit commit = commits[i];
 				long now = SegmentInfos.GenerationFromSegmentsFileName(commit.GetSegmentsFileName());
 				long nowVersion = commit.GetVersion();
 				long nowTimestamp = commit.GetTimestamp();
@@ -86,15 +86,15 @@ namespace Lucene.Net.Index
 			internal int numOnInit;
 			internal int numOnCommit;
 			internal Directory dir;
-			public virtual void  OnInit(System.Collections.IList commits)
+			public virtual void  OnInit<T>(IList<T> commits) where T : IndexCommit
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnInit++;
 			}
-			public virtual void  OnCommit(System.Collections.IList commits)
+			public virtual void  OnCommit<T>(IList<T> commits) where T : IndexCommit
 			{
 				IndexCommit lastCommit = (IndexCommit) commits[commits.Count - 1];
-				IndexReader r = IndexReader.Open(dir);
+				IndexReader r = IndexReader.Open(dir, true);
 				Assert.AreEqual(r.IsOptimized(), lastCommit.IsOptimized(), "lastCommit.isOptimized()=" + lastCommit.IsOptimized() + " vs IndexReader.isOptimized=" + r.IsOptimized());
 				r.Close();
 				Enclosing_Instance.VerifyCommitOrder(commits);
@@ -102,8 +102,8 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		/// <summary> This is useful for adding to a big index w/ autoCommit
-		/// false when you know readers are not using it.
+		/// <summary> This is useful for adding to a big index when you know
+		/// readers are not using it.
 		/// </summary>
 		internal class KeepNoneOnInitDeletionPolicy : IndexDeletionPolicy
 		{
@@ -126,7 +126,7 @@ namespace Lucene.Net.Index
 			}
 			internal int numOnInit;
 			internal int numOnCommit;
-			public virtual void  OnInit(System.Collections.IList commits)
+			public virtual void  OnInit<T>(IList<T> commits) where T : IndexCommit
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnInit++;
@@ -139,7 +139,7 @@ namespace Lucene.Net.Index
 					Assert.IsTrue(commit.IsDeleted());
 				}
 			}
-			public virtual void  OnCommit(System.Collections.IList commits)
+			public virtual void  OnCommit<T>(IList<T> commits) where T : IndexCommit
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				int size = commits.Count;
@@ -179,7 +179,7 @@ namespace Lucene.Net.Index
 				this.numToKeep = numToKeep;
 			}
 			
-			public virtual void  OnInit(System.Collections.IList commits)
+			public virtual void  OnInit<T>(IList<T> commits) where T : IndexCommit
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				numOnInit++;
@@ -187,20 +187,20 @@ namespace Lucene.Net.Index
 				DoDeletes(commits, false);
 			}
 			
-			public virtual void  OnCommit(System.Collections.IList commits)
+			public virtual void  OnCommit<T>(IList<T> commits) where T : IndexCommit
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				DoDeletes(commits, true);
 			}
 			
-			private void  DoDeletes(System.Collections.IList commits, bool isCommit)
+			private void  DoDeletes<T>(IList<T> commits, bool isCommit) where T : IndexCommit
 			{
 				
 				// Assert that we really are only called for each new
 				// commit:
 				if (isCommit)
 				{
-					System.String fileName = ((IndexCommit) commits[commits.Count - 1]).GetSegmentsFileName();
+					System.String fileName = commits[commits.Count - 1].GetSegmentsFileName();
 					if (seen.Contains(fileName))
 					{
 						throw new System.SystemException("onCommit was called twice on the same commit point: " + fileName);
@@ -248,17 +248,17 @@ namespace Lucene.Net.Index
 				this.expirationTimeSeconds = seconds;
 			}
 			
-			public virtual void  OnInit(System.Collections.IList commits)
+			public virtual void  OnInit<T>(IList<T> commits) where T : IndexCommit
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				OnCommit(commits);
 			}
 			
-			public virtual void  OnCommit(System.Collections.IList commits)
+			public virtual void  OnCommit<T>(IList<T> commits) where T : IndexCommit
 			{
 				Enclosing_Instance.VerifyCommitOrder(commits);
 				
-				IndexCommit lastCommit = (IndexCommit) commits[commits.Count - 1];
+				IndexCommit lastCommit = commits[commits.Count - 1];
 				
 				// Any commit older than expireTime should be deleted:
 				double expireTime = dir.FileModified(lastCommit.GetSegmentsFileName()) / 1000.0 - expirationTimeSeconds;
@@ -284,15 +284,13 @@ namespace Lucene.Net.Index
 		[Test]
 		public virtual void  TestExpirationTimeDeletionPolicy()
 		{
-			
 			double SECONDS = 2.0;
 			
-			bool autoCommit = false;
 			bool useCompoundFile = true;
 			
 			Directory dir = new RAMDirectory();
 			ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(this, dir, SECONDS);
-			IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 			writer.SetUseCompoundFile(useCompoundFile);
 			writer.Close();
 			
@@ -302,7 +300,7 @@ namespace Lucene.Net.Index
 				// Record last time when writer performed deletes of
 				// past commits
 				lastDeleteTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
-				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+                writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetUseCompoundFile(useCompoundFile);
 				for (int j = 0; j < 17; j++)
 				{
@@ -330,7 +328,7 @@ namespace Lucene.Net.Index
 			{
 				try
 				{
-					IndexReader reader = IndexReader.Open(dir);
+					IndexReader reader = IndexReader.Open(dir, true);
 					reader.Close();
 					fileName = IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);
 					long modTime = dir.FileModified(fileName);
@@ -356,57 +354,48 @@ namespace Lucene.Net.Index
 		public virtual void  TestKeepAllDeletionPolicy()
 		{
 			
-			for (int pass = 0; pass < 4; pass++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				
-				bool autoCommit = pass < 2;
-				bool useCompoundFile = (pass % 2) > 0;
+				bool useCompoundFile = (pass % 2) != 0;
 				
 				// Never deletes a commit
 				KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(this);
 				
 				Directory dir = new RAMDirectory();
 				policy.dir = dir;
-				
-				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetMaxBufferedDocs(10);
 				writer.SetUseCompoundFile(useCompoundFile);
 				writer.SetMergeScheduler(new SerialMergeScheduler());
 				for (int i = 0; i < 107; i++)
 				{
 					AddDoc(writer);
-					if (autoCommit && i % 10 == 0)
-						writer.Commit();
 				}
 				writer.Close();
-				
-				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+
+                writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetUseCompoundFile(useCompoundFile);
 				writer.Optimize();
 				writer.Close();
 				
 				Assert.AreEqual(2, policy.numOnInit);
-				if (!autoCommit)
+
 				// If we are not auto committing then there should
 				// be exactly 2 commits (one per close above):
-					Assert.AreEqual(2, policy.numOnCommit);
+				Assert.AreEqual(2, policy.numOnCommit);
 				
 				// Test listCommits
-				System.Collections.ICollection commits = IndexReader.ListCommits(dir);
-				if (!autoCommit)
+				ICollection<IndexCommit> commits = IndexReader.ListCommits(dir);
 				// 1 from opening writer + 2 from closing writer
-					Assert.AreEqual(3, commits.Count);
-				// 1 from opening writer + 2 from closing writer +
-				// 11 from calling writer.commit() explicitly above
-				else
-					Assert.AreEqual(14, commits.Count);
+				Assert.AreEqual(3, commits.Count);
 				
 				System.Collections.IEnumerator it = commits.GetEnumerator();
 				// Make sure we can open a reader on each commit:
 				while (it.MoveNext())
 				{
 					IndexCommit commit = (IndexCommit) it.Current;
-					IndexReader r = IndexReader.Open(commit, null);
+					IndexReader r = IndexReader.Open(commit, null, false);
 					r.Close();
 				}
 				
@@ -416,7 +405,7 @@ namespace Lucene.Net.Index
 				long gen = SegmentInfos.GetCurrentSegmentGeneration(dir);
 				while (gen > 0)
 				{
-					IndexReader reader = IndexReader.Open(dir);
+					IndexReader reader = IndexReader.Open(dir, true);
 					reader.Close();
 					dir.DeleteFile(IndexFileNames.FileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
 					gen--;
@@ -462,7 +451,7 @@ namespace Lucene.Net.Index
 			}
 			writer.Close();
 			
-			System.Collections.ICollection commits = IndexReader.ListCommits(dir);
+			ICollection<IndexCommit> commits = IndexReader.ListCommits(dir);
 			Assert.AreEqual(6, commits.Count);
 			IndexCommit lastCommit = null;
 			System.Collections.IEnumerator it = commits.GetEnumerator();
@@ -490,7 +479,7 @@ namespace Lucene.Net.Index
 			// Should undo our rollback:
 			writer.Rollback();
 			
-			IndexReader r = IndexReader.Open(dir);
+			IndexReader r = IndexReader.Open(dir, true);
 			// Still optimized, still 11 docs
 			Assert.IsTrue(r.IsOptimized());
 			Assert.AreEqual(11, r.NumDocs());
@@ -504,7 +493,7 @@ namespace Lucene.Net.Index
 			// Now 8 because we made another commit
 			Assert.AreEqual(8, IndexReader.ListCommits(dir).Count);
 			
-			r = IndexReader.Open(dir);
+			r = IndexReader.Open(dir, true);
 			// Not optimized because we rolled it back, and now only
 			// 10 docs
 			Assert.IsTrue(!r.IsOptimized());
@@ -516,7 +505,7 @@ namespace Lucene.Net.Index
 			writer.Optimize();
 			writer.Close();
 			
-			r = IndexReader.Open(dir);
+			r = IndexReader.Open(dir, true);
 			Assert.IsTrue(r.IsOptimized());
 			Assert.AreEqual(10, r.NumDocs());
 			r.Close();
@@ -528,7 +517,7 @@ namespace Lucene.Net.Index
 			
 			// Reader still sees optimized index, because writer
 			// opened on the prior commit has not yet committed:
-			r = IndexReader.Open(dir);
+			r = IndexReader.Open(dir, true);
 			Assert.IsTrue(r.IsOptimized());
 			Assert.AreEqual(10, r.NumDocs());
 			r.Close();
@@ -536,7 +525,7 @@ namespace Lucene.Net.Index
 			writer.Close();
 			
 			// Now reader sees unoptimized index:
-			r = IndexReader.Open(dir);
+			r = IndexReader.Open(dir, true);
 			Assert.IsTrue(!r.IsOptimized());
 			Assert.AreEqual(10, r.NumDocs());
 			r.Close();
@@ -546,24 +535,21 @@ namespace Lucene.Net.Index
 		
 		
 		/* Test keeping NO commit points.  This is a viable and
-		* useful case eg where you want to build a big index with
-		* autoCommit false and you know there are no readers.
+		* useful case, e.g. where you want to build a big index and
+		* you know there are no readers.
 		*/
 		[Test]
 		public virtual void  TestKeepNoneOnInitDeletionPolicy()
 		{
-			
-			for (int pass = 0; pass < 4; pass++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				
-				bool autoCommit = pass < 2;
-				bool useCompoundFile = (pass % 2) > 0;
+				bool useCompoundFile = (pass % 2) != 0;
 				
 				KeepNoneOnInitDeletionPolicy policy = new KeepNoneOnInitDeletionPolicy(this);
 				
 				Directory dir = new RAMDirectory();
-				
-				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetMaxBufferedDocs(10);
 				writer.SetUseCompoundFile(useCompoundFile);
 				for (int i = 0; i < 107; i++)
@@ -571,21 +557,20 @@ namespace Lucene.Net.Index
 					AddDoc(writer);
 				}
 				writer.Close();
-				
-				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+
+                writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetUseCompoundFile(useCompoundFile);
 				writer.Optimize();
 				writer.Close();
 				
 				Assert.AreEqual(2, policy.numOnInit);
-				if (!autoCommit)
 				// If we are not auto committing then there should
 				// be exactly 2 commits (one per close above):
-					Assert.AreEqual(2, policy.numOnCommit);
+				Assert.AreEqual(2, policy.numOnCommit);
 				
 				// Simplistic check: just verify the index is in fact
 				// readable:
-				IndexReader reader = IndexReader.Open(dir);
+				IndexReader reader = IndexReader.Open(dir, true);
 				reader.Close();
 				
 				dir.Close();
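
The constructor rewrite above is the pattern applied throughout this commit: the old `(dir, autoCommit, analyzer, create, policy)` overload gives way to `(dir, analyzer, create, policy, MaxFieldLength)`. A sketch of the new shape, using Lucene's stock KeepOnlyLastCommitDeletionPolicy as a stand-in for the test's helper policy:

    using Lucene.Net.Analysis;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    Directory dir = new RAMDirectory();
    IndexDeletionPolicy policy = new KeepOnlyLastCommitDeletionPolicy(); // stand-in for the test's policy

    // create: true wipes any existing index; UNLIMITED disables per-field term truncation.
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy,
                                         IndexWriter.MaxFieldLength.UNLIMITED);
    writer.SetUseCompoundFile(false);
    writer.Close(); // each Close() is a commit the policy gets to inspect via OnCommit
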
@@ -598,14 +583,11 @@ namespace Lucene.Net.Index
 		[Test]
 		public virtual void  TestKeepLastNDeletionPolicy()
 		{
-			
 			int N = 5;
 			
-			for (int pass = 0; pass < 4; pass++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				
-				bool autoCommit = pass < 2;
-				bool useCompoundFile = (pass % 2) > 0;
+				bool useCompoundFile = (pass % 2) != 0;
 				
 				Directory dir = new RAMDirectory();
 				
@@ -613,7 +595,7 @@ namespace Lucene.Net.Index
 				
 				for (int j = 0; j < N + 1; j++)
 				{
-					IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+                    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 					writer.SetMaxBufferedDocs(10);
 					writer.SetUseCompoundFile(useCompoundFile);
 					for (int i = 0; i < 17; i++)
@@ -626,14 +608,7 @@ namespace Lucene.Net.Index
 				
 				Assert.IsTrue(policy.numDelete > 0);
 				Assert.AreEqual(N + 1, policy.numOnInit);
-				if (autoCommit)
-				{
-					Assert.IsTrue(policy.numOnCommit > 1);
-				}
-				else
-				{
-					Assert.AreEqual(N + 1, policy.numOnCommit);
-				}
+				Assert.AreEqual(N + 1, policy.numOnCommit);
 				
 				// Simplistic check: just verify only the past N segments_N's still
 				// exist, and, I can open a reader on each:
@@ -643,7 +618,7 @@ namespace Lucene.Net.Index
 				{
 					try
 					{
-						IndexReader reader = IndexReader.Open(dir);
+						IndexReader reader = IndexReader.Open(dir, true);
 						reader.Close();
 						if (i == N)
 						{
@@ -675,19 +650,16 @@ namespace Lucene.Net.Index
 		[Test]
 		public virtual void  TestKeepLastNDeletionPolicyWithReader()
 		{
-			
 			int N = 10;
 			
-			for (int pass = 0; pass < 4; pass++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				
-				bool autoCommit = pass < 2;
-				bool useCompoundFile = (pass % 2) > 0;
+				bool useCompoundFile = (pass % 2) != 0;
 				
 				KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
 				
 				Directory dir = new RAMDirectory();
-				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetUseCompoundFile(useCompoundFile);
 				writer.Close();
 				Term searchTerm = new Term("content", "aaa");
@@ -695,35 +667,34 @@ namespace Lucene.Net.Index
 				
 				for (int i = 0; i < N + 1; i++)
 				{
-					writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+                    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 					writer.SetUseCompoundFile(useCompoundFile);
 					for (int j = 0; j < 17; j++)
 					{
 						AddDoc(writer);
 					}
-					// this is a commit when autoCommit=false:
+					// this is a commit
 					writer.Close();
-					IndexReader reader = IndexReader.Open(dir, policy);
+					IndexReader reader = IndexReader.Open(dir, policy, false);
 					reader.DeleteDocument(3 * i + 1);
 					reader.SetNorm(4 * i + 1, "content", 2.0F);
 					IndexSearcher searcher = new IndexSearcher(reader);
 					ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
 					Assert.AreEqual(16 * (1 + i), hits.Length);
-					// this is a commit when autoCommit=false:
+					// this is a commit
 					reader.Close();
 					searcher.Close();
 				}
-				writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+                writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetUseCompoundFile(useCompoundFile);
 				writer.Optimize();
-				// this is a commit when autoCommit=false:
+				// this is a commit
 				writer.Close();
 				
 				Assert.AreEqual(2 * (N + 2), policy.numOnInit);
-				if (!autoCommit)
-					Assert.AreEqual(2 * (N + 2) - 1, policy.numOnCommit);
+				Assert.AreEqual(2 * (N + 2) - 1, policy.numOnCommit);
 				
-				IndexSearcher searcher2 = new IndexSearcher(dir);
+				IndexSearcher searcher2 = new IndexSearcher(dir, false);
 				ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
 				Assert.AreEqual(176, hits2.Length);
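
Opening the reader as `IndexReader.Open(dir, policy, false)` is what lets this loop delete documents and set norms; the read-only `true` variants used elsewhere in the file would refuse. A condensed sketch of the same pattern, with placeholder directory path, doc ids, and field/term names:

    using Lucene.Net.Index;
    using Lucene.Net.Search;
    using Lucene.Net.Store;

    // dir holds an existing index; the policy decides which commit points survive.
    Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo("/tmp/idx")); // path is a placeholder
    IndexDeletionPolicy policy = new KeepOnlyLastCommitDeletionPolicy();

    IndexReader reader = IndexReader.Open(dir, policy, false); // writable, policy-aware
    reader.DeleteDocument(0);                // placeholder doc id
    reader.SetNorm(1, "content", 2.0f);      // placeholder doc id and field
    reader.Close();                          // flushing the deletes/norms is itself a commit

    IndexSearcher searcher = new IndexSearcher(dir, false);
    ScoreDoc[] hits = searcher.Search(new TermQuery(new Term("content", "aaa")), null, 1000).ScoreDocs;
    searcher.Close();
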
 				
@@ -738,29 +709,25 @@ namespace Lucene.Net.Index
 				{
 					try
 					{
-						IndexReader reader = IndexReader.Open(dir);
+						IndexReader reader = IndexReader.Open(dir, true);
 						
 						// Work backwards in commits on what the expected
-						// count should be.  Only check this in the
-						// autoCommit false case:
-						if (!autoCommit)
+						// count should be.
+						searcher2 = new IndexSearcher(reader);
+						hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
+						if (i > 1)
 						{
-							searcher2 = new IndexSearcher(reader);
-							hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
-							if (i > 1)
+							if (i % 2 == 0)
 							{
-								if (i % 2 == 0)
-								{
-									expectedCount += 1;
-								}
-								else
-								{
-									expectedCount -= 17;
-								}
+								expectedCount += 1;
+							}
+							else
+							{
+								expectedCount -= 17;
 							}
-							Assert.AreEqual(expectedCount, hits2.Length);
-							searcher2.Close();
 						}
+						Assert.AreEqual(expectedCount, hits2.Length);
+						searcher2.Close();
 						reader.Close();
 						if (i == N)
 						{
@@ -792,19 +759,16 @@ namespace Lucene.Net.Index
 		[Test]
 		public virtual void  TestKeepLastNDeletionPolicyWithCreates()
 		{
-			
 			int N = 10;
 			
-			for (int pass = 0; pass < 4; pass++)
+			for (int pass = 0; pass < 2; pass++)
 			{
-				
-				bool autoCommit = pass < 2;
-				bool useCompoundFile = (pass % 2) > 0;
+				bool useCompoundFile = (pass % 2) != 0;
 				
 				KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(this, N);
 				
 				Directory dir = new RAMDirectory();
-				IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+                IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 				writer.SetMaxBufferedDocs(10);
 				writer.SetUseCompoundFile(useCompoundFile);
 				writer.Close();
@@ -813,37 +777,36 @@ namespace Lucene.Net.Index
 				
 				for (int i = 0; i < N + 1; i++)
 				{
-					
-					writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+
+                    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 					writer.SetMaxBufferedDocs(10);
 					writer.SetUseCompoundFile(useCompoundFile);
 					for (int j = 0; j < 17; j++)
 					{
 						AddDoc(writer);
 					}
-					// this is a commit when autoCommit=false:
+					// this is a commit
 					writer.Close();
-					IndexReader reader = IndexReader.Open(dir, policy);
+					IndexReader reader = IndexReader.Open(dir, policy, false);
 					reader.DeleteDocument(3);
 					reader.SetNorm(5, "content", 2.0F);
 					IndexSearcher searcher = new IndexSearcher(reader);
 					ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;
 					Assert.AreEqual(16, hits.Length);
-					// this is a commit when autoCommit=false:
+					// this is a commit
 					reader.Close();
 					searcher.Close();
-					
-					writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+
+                    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
 					// This will not commit: there are no changes
 					// pending because we opened for "create":
 					writer.Close();
 				}
 				
 				Assert.AreEqual(1 + 3 * (N + 1), policy.numOnInit);
-				if (!autoCommit)
-					Assert.AreEqual(3 * (N + 1), policy.numOnCommit);
+				Assert.AreEqual(3 * (N + 1), policy.numOnCommit);
 				
-				IndexSearcher searcher2 = new IndexSearcher(dir);
+				IndexSearcher searcher2 = new IndexSearcher(dir, false);
 				ScoreDoc[] hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
 				Assert.AreEqual(0, hits2.Length);
 				
@@ -858,29 +821,25 @@ namespace Lucene.Net.Index
 				{
 					try
 					{
-						IndexReader reader = IndexReader.Open(dir);
+						IndexReader reader = IndexReader.Open(dir, true);
 						
 						// Work backwards in commits on what the expected
-						// count should be.  Only check this in the
-						// autoCommit false case:
-						if (!autoCommit)
+						// count should be.
+						searcher2 = new IndexSearcher(reader);
+						hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
+						Assert.AreEqual(expectedCount, hits2.Length);
+						searcher2.Close();
+						if (expectedCount == 0)
 						{
-							searcher2 = new IndexSearcher(reader);
-							hits2 = searcher2.Search(query, null, 1000).ScoreDocs;
-							Assert.AreEqual(expectedCount, hits2.Length);
-							searcher2.Close();
-							if (expectedCount == 0)
-							{
-								expectedCount = 16;
-							}
-							else if (expectedCount == 16)
-							{
-								expectedCount = 17;
-							}
-							else if (expectedCount == 17)
-							{
-								expectedCount = 0;
-							}
+							expectedCount = 16;
+						}
+						else if (expectedCount == 16)
+						{
+							expectedCount = 17;
+						}
+						else if (expectedCount == 17)
+						{
+							expectedCount = 0;
 						}
 						reader.Close();
 						if (i == N)

Modified: incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDirectoryReader.cs Tue Feb 28 22:43:08 2012
@@ -65,7 +65,7 @@ namespace Lucene.Net.Index
 		protected internal virtual IndexReader OpenReader()
 		{
 			IndexReader reader;
-			reader = IndexReader.Open(dir);
+			reader = IndexReader.Open(dir, false);
 			Assert.IsTrue(reader is DirectoryReader);
 			
 			Assert.IsTrue(dir != null);
@@ -151,7 +151,7 @@ namespace Lucene.Net.Index
 			AddDoc(ramDir1, "test foo", true);
 			RAMDirectory ramDir2 = new RAMDirectory();
 			AddDoc(ramDir2, "test blah", true);
-			IndexReader[] readers = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir2)};
+			IndexReader[] readers = new IndexReader[]{IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir2, false)};
 			MultiReader mr = new MultiReader(readers);
 			Assert.IsTrue(mr.IsCurrent()); // just opened, must be current
 			AddDoc(ramDir1, "more text", false);
@@ -179,9 +179,9 @@ namespace Lucene.Net.Index
 			AddDoc(ramDir2, "test blah", true);
 			RAMDirectory ramDir3 = new RAMDirectory();
 			AddDoc(ramDir3, "test wow", true);
-			
-			IndexReader[] readers1 = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir3)};
-			IndexReader[] readers2 = new IndexReader[]{IndexReader.Open(ramDir1), IndexReader.Open(ramDir2), IndexReader.Open(ramDir3)};
+
+            IndexReader[] readers1 = new [] { IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir3, false) };
+            IndexReader[] readers2 = new [] { IndexReader.Open(ramDir1, false), IndexReader.Open(ramDir2, false), IndexReader.Open(ramDir3, false) };
 			MultiReader mr2 = new MultiReader(readers1);
 			MultiReader mr3 = new MultiReader(readers2);
 			
@@ -221,7 +221,7 @@ namespace Lucene.Net.Index
 		
 		private void  AddDoc(RAMDirectory ramDir1, System.String s, bool create)
 		{
-			IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(), create, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
 			Document doc = new Document();
 			doc.Add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
 			iw.AddDocument(doc);
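
`StandardAnalyzer` now requires a `Version` argument so tokenization behaviour can be pinned across upgrades; `LUCENE_CURRENT` opts into the newest rules. A minimal sketch of indexing one document with the version-aware constructor:

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT);
    var writer = new IndexWriter(new RAMDirectory(), analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

    var doc = new Document();
    doc.Add(new Field("body", "test foo", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Close();
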

Modified: incubator/lucene.net/trunk/test/core/Index/TestDoc.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDoc.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDoc.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDoc.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using Lucene.Net.Documents;
+using Lucene.Net.Support;
 using NUnit.Framework;
 
 using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
@@ -30,11 +31,7 @@ namespace Lucene.Net.Index
 {
 	
 	
-	/// <summary>JUnit adaptation of an older test case DocTest.
-	/// 
-	/// </summary>
-	/// <version>  $Id: TestDoc.java 780770 2009-06-01 18:34:10Z uschindler $
-	/// </version>
+	/// <summary>JUnit adaptation of an older test case DocTest.</summary>
 	[TestFixture]
 	public class TestDoc:LuceneTestCase
 	{
@@ -47,8 +44,8 @@ namespace Lucene.Net.Index
 		}
 		
 		
-		private System.IO.FileInfo workDir;
-		private System.IO.FileInfo indexDir;
+		private System.IO.DirectoryInfo workDir;
+		private System.IO.DirectoryInfo indexDir;
 		private System.Collections.ArrayList files;
 		
 		
@@ -59,10 +56,10 @@ namespace Lucene.Net.Index
 		public override void  SetUp()
 		{
 			base.SetUp();
-			workDir = new System.IO.FileInfo(System.IO.Path.Combine(SupportClass.AppSettings.Get("tempDir", ""), "TestDoc"));
+			workDir = new System.IO.DirectoryInfo(System.IO.Path.Combine(AppSettings.Get("tempDir", ""), "TestDoc"));
 			System.IO.Directory.CreateDirectory(workDir.FullName);
 			
-			indexDir = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, "testIndex"));
+			indexDir = new System.IO.DirectoryInfo(System.IO.Path.Combine(workDir.FullName, "testIndex"));
 			System.IO.Directory.CreateDirectory(indexDir.FullName);
 			
 			Directory directory = FSDirectory.Open(indexDir);
@@ -74,14 +71,14 @@ namespace Lucene.Net.Index
 			files.Add(CreateOutput("test2.txt", "This is the second test file"));
 		}
 		
-		private System.IO.FileInfo CreateOutput(System.String name, System.String text)
+		private System.IO.DirectoryInfo CreateOutput(System.String name, System.String text)
 		{
 			System.IO.StreamWriter fw = null;
 			System.IO.StreamWriter pw = null;
 			
 			try
 			{
-				System.IO.FileInfo f = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, name));
+				System.IO.DirectoryInfo f = new System.IO.DirectoryInfo(System.IO.Path.Combine(workDir.FullName, name));
 				bool tmpBool;
 				if (System.IO.File.Exists(f.FullName))
 					tmpBool = true;
@@ -191,18 +188,19 @@ namespace Lucene.Net.Index
 		
 		private SegmentInfo IndexDoc(IndexWriter writer, System.String fileName)
 		{
-			System.IO.FileInfo file = new System.IO.FileInfo(System.IO.Path.Combine(workDir.FullName, fileName));
+			System.IO.DirectoryInfo file = new System.IO.DirectoryInfo(System.IO.Path.Combine(workDir.FullName, fileName));
 			Document doc = FileDocument.Document(file);
+            doc.Add(new Field("contents", new System.IO.StreamReader(file.FullName)));
 			writer.AddDocument(doc);
-			writer.Flush();
+			writer.Commit();
 			return writer.NewestSegment();
 		}
 		
 		
 		private SegmentInfo Merge(SegmentInfo si1, SegmentInfo si2, System.String merged, bool useCompoundFile)
 		{
-			SegmentReader r1 = SegmentReader.Get(si1);
-			SegmentReader r2 = SegmentReader.Get(si2);
+            SegmentReader r1 = SegmentReader.Get(true, si1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+            SegmentReader r2 = SegmentReader.Get(true, si2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			
 			SegmentMerger merger = new SegmentMerger(si1.dir, merged);
 			
@@ -226,7 +224,7 @@ namespace Lucene.Net.Index
 		
 		private void  PrintSegment(System.IO.StreamWriter out_Renamed, SegmentInfo si)
 		{
-			SegmentReader reader = SegmentReader.Get(si);
+			SegmentReader reader = SegmentReader.Get(true, si, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			
 			for (int i = 0; i < reader.NumDocs(); i++)
 			{

Modified: incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs (original)
+++ incubator/lucene.net/trunk/test/core/Index/TestDocumentWriter.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Util;
 using NUnit.Framework;
 
 using Analyzer = Lucene.Net.Analysis.Analyzer;
@@ -44,7 +44,7 @@ namespace Lucene.Net.Index
 {
 	
 	[TestFixture]
-	public class TestDocumentWriter:BaseTokenStreamTestCase
+	public class TestDocumentWriter : LuceneTestCase
 	{
 		private class AnonymousClassAnalyzer:Analyzer
 		{
@@ -86,9 +86,9 @@ namespace Lucene.Net.Index
 				private void  InitBlock(AnonymousClassAnalyzer1 enclosingInstance)
 				{
 					this.enclosingInstance = enclosingInstance;
-					termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
-					payloadAtt = (PayloadAttribute) AddAttribute(typeof(PayloadAttribute));
-					posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
+                    termAtt = AddAttribute<TermAttribute>();
+                    payloadAtt = AddAttribute<PayloadAttribute>();
+                    posIncrAtt = AddAttribute<PositionIncrementAttribute>();
 				}
 				private AnonymousClassAnalyzer1 enclosingInstance;
 				public AnonymousClassAnalyzer1 Enclosing_Instance
@@ -168,7 +168,7 @@ namespace Lucene.Net.Index
 			private void  InitBlock(TestDocumentWriter enclosingInstance)
 			{
 				this.enclosingInstance = enclosingInstance;
-				termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+                termAtt = AddAttribute<TermAttribute>();
 			}
 			private TestDocumentWriter enclosingInstance;
 			public TestDocumentWriter Enclosing_Instance
@@ -197,6 +197,11 @@ namespace Lucene.Net.Index
 					return true;
 				}
 			}
+
+		    protected override void Dispose(bool disposing)
+		    {
+		        // Do Nothing
+		    }
 		}
 		private RAMDirectory dir;
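
The `(TermAttribute) AddAttribute(typeof(TermAttribute))` casts in the hunks above collapse into the generic `AddAttribute<T>()`, and TokenStream subclasses in this port now supply a `Dispose(bool)` override. A minimal single-token stream under the generic API (the token text is a placeholder):

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    public class OneTokenStream : TokenStream
    {
        private readonly TermAttribute termAtt;
        private bool done;

        public OneTokenStream()
        {
            termAtt = AddAttribute<TermAttribute>(); // typed accessor, no cast needed
        }

        public override bool IncrementToken()
        {
            if (done)
                return false;
            done = true;
            ClearAttributes();
            termAtt.SetTermBuffer("term1"); // placeholder token text
            return true;
        }

        protected override void Dispose(bool disposing)
        {
            // nothing unmanaged to release in this sketch
        }
    }
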
 		
@@ -229,11 +234,11 @@ namespace Lucene.Net.Index
 			Analyzer analyzer = new WhitespaceAnalyzer();
 			IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(testDoc);
-			writer.Flush();
+			writer.Commit();
 			SegmentInfo info = writer.NewestSegment();
 			writer.Close();
 			//After adding the document, we should be able to read it back in
-			SegmentReader reader = SegmentReader.Get(info);
+            SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			Assert.IsTrue(reader != null);
 			Document doc = reader.Document(0);
 			Assert.IsTrue(doc != null);
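
Two API shifts meet in this hunk: `Flush()` becomes the explicit `Commit()`, and `SegmentReader.Get` now takes a readOnly flag plus a terms-index divisor. A sketch of the commit-visibility rule using only the public surface (directory, field, and text are placeholders):

    using Lucene.Net.Analysis;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

    Document doc = new Document();
    doc.Add(new Field("contents", "first test file", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);

    writer.Commit();   // publishes a commit point: a reader opened now sees the document
    writer.Close();    // Close() also commits any remaining buffered changes
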
@@ -285,10 +290,10 @@ namespace Lucene.Net.Index
 			doc.Add(new Field("repeated", "repeated two", Field.Store.YES, Field.Index.ANALYZED));
 			
 			writer.AddDocument(doc);
-			writer.Flush();
+			writer.Commit();
 			SegmentInfo info = writer.NewestSegment();
 			writer.Close();
-			SegmentReader reader = SegmentReader.Get(info);
+            SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			
 			TermPositions termPositions = reader.TermPositions(new Term("repeated", "repeated"));
 			Assert.IsTrue(termPositions.Next());
@@ -309,10 +314,10 @@ namespace Lucene.Net.Index
 			doc.Add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
 			
 			writer.AddDocument(doc);
-			writer.Flush();
+			writer.Commit();
 			SegmentInfo info = writer.NewestSegment();
 			writer.Close();
-			SegmentReader reader = SegmentReader.Get(info);
+            SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			
 			TermPositions termPositions = reader.TermPositions(new Term("f1", "a"));
 			Assert.IsTrue(termPositions.Next());
@@ -336,10 +341,10 @@ namespace Lucene.Net.Index
 			doc.Add(new Field("preanalyzed", new AnonymousClassTokenStream(this), TermVector.NO));
 			
 			writer.AddDocument(doc);
-			writer.Flush();
+			writer.Commit();
 			SegmentInfo info = writer.NewestSegment();
 			writer.Close();
-			SegmentReader reader = SegmentReader.Get(info);
+            SegmentReader reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 			
 			TermPositions termPositions = reader.TermPositions(new Term("preanalyzed", "term1"));
 			Assert.IsTrue(termPositions.Next());
@@ -371,14 +376,15 @@ namespace Lucene.Net.Index
 			// f2 first with tv then without tv
 			doc.Add(new Field("f2", "v1", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
 			doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NOT_ANALYZED, TermVector.NO));
-			
-			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+
+		    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true,
+		                                         IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(doc);
 			writer.Close();
 			
 			_TestUtil.CheckIndex(dir);
 			
-			IndexReader reader = IndexReader.Open(dir);
+			IndexReader reader = IndexReader.Open(dir, true);
 			// f1
 			TermFreqVector tfv1 = reader.GetTermFreqVector(0, "f1");
 			Assert.IsNotNull(tfv1);
@@ -406,7 +412,7 @@ namespace Lucene.Net.Index
 			doc.Add(f);
 			doc.Add(new Field("f2", "v2", Field.Store.YES, Field.Index.NO));
 			
-			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.AddDocument(doc);
 			writer.Optimize(); // be sure to have a single segment
 			writer.Close();
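
Reading the vectors back follows the same read-only pattern as the rest of this commit; a short sketch with the field name from the test above, assuming `dir` holds the index just written:

    using Lucene.Net.Index;

    IndexReader reader = IndexReader.Open(dir, true);        // read-only is enough for vector access
    TermFreqVector tfv = reader.GetTermFreqVector(0, "f1");  // null when the field stored no vector
    if (tfv != null)
    {
        System.String[] terms = tfv.GetTerms();              // e.g. { "v1", "v2" } for the test doc
    }
    reader.Close();
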


