lucenenet-commits mailing list archives

From: aro...@apache.org
Subject: svn commit: r677059 [11/19] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/...
Date: Tue, 15 Jul 2008 21:44:10 GMT
Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestTerm.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTerm.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTerm.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTerm.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Index
+{
+	
+	[TestFixture]
+	public class TestTerm : LuceneTestCase
+	{
+		
+		[Test]
+		public virtual void  TestEquals()
+		{
+			Term base_Renamed = new Term("same", "same");
+			Term same = new Term("same", "same");
+			Term differentField = new Term("different", "same");
+			Term differentText = new Term("same", "different");
+			System.String differentType = "AString";
+			Assert.AreEqual(base_Renamed, base_Renamed);
+			Assert.AreEqual(base_Renamed, same);
+			Assert.IsFalse(base_Renamed.Equals(differentField));
+			Assert.IsFalse(base_Renamed.Equals(differentText));
+			Assert.IsFalse(base_Renamed.Equals(differentType));
+		}
+	}
+}
\ No newline at end of file
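
The test above pins down Term's equality contract: two Terms are equal only when both field and text match, and comparison against a non-Term (the "AString" string) must return false. This contract matters in practice because Terms are routinely used as hash keys; a minimal sketch of that usage (not part of this commit, and assuming Term overrides GetHashCode consistently with Equals, as the Java original does):

    using System.Collections.Generic;
    using Term = Lucene.Net.Index.Term;

    public class TermKeySketch
    {
        public static void Main()
        {
            // Two structurally equal Terms must address the same entry.
            Dictionary<Term, int> freqs = new Dictionary<Term, int>();
            freqs[new Term("body", "lucene")] = 1;
            freqs[new Term("body", "lucene")] = freqs[new Term("body", "lucene")] + 1;
            System.Console.WriteLine(freqs.Count);                        // 1
            System.Console.WriteLine(freqs[new Term("body", "lucene")]);  // 2
        }
    }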

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermVectorsReader.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs Tue Jul 15 14:44:04 2008
@@ -19,118 +19,230 @@
 
 using NUnit.Framework;
 
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using Token = Lucene.Net.Analysis.Token;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
 
 namespace Lucene.Net.Index
 {
+	
 	[TestFixture]
-	public class TestTermVectorsReader
+	public class TestTermVectorsReader : LuceneTestCase
 	{
 		private void  InitBlock()
 		{
 			positions = new int[testTerms.Length][];
 			offsets = new TermVectorOffsetInfo[testTerms.Length][];
+			tokens = new TestToken[testTerms.Length * TERM_FREQ];
 		}
-		private TermVectorsWriter writer = null;
 		//Must be lexicographically sorted, will do in setup, versus trying to maintain here
-		private System.String[] testFields = new System.String[]{"f1", "f2", "f3"};
+		private System.String[] testFields = new System.String[]{"f1", "f2", "f3", "f4"};
 		private bool[] testFieldsStorePos = new bool[]{true, false, true, false};
 		private bool[] testFieldsStoreOff = new bool[]{true, false, false, true};
 		private System.String[] testTerms = new System.String[]{"this", "is", "a", "test"};
 		private int[][] positions;
 		private TermVectorOffsetInfo[][] offsets;
-		private RAMDirectory dir = new RAMDirectory();
-		private System.String seg = "testSegment";
+		private MockRAMDirectory dir = new MockRAMDirectory();
+		private System.String seg;
 		private FieldInfos fieldInfos = new FieldInfos();
+		private static int TERM_FREQ = 3;
 		
-        public TestTermVectorsReader()
-        {
-            InitBlock();
-        }
-
-        public TestTermVectorsReader(System.String s)
+		public TestTermVectorsReader():base()
 		{
 			InitBlock();
 		}
 		
-		[SetUp]
-        public virtual void  SetUp()
+		internal class TestToken : System.IComparable
 		{
-			for (int i = 0; i < testFields.Length; i++)
+			public TestToken(TestTermVectorsReader enclosingInstance)
 			{
-				fieldInfos.Add(testFields[i], true, true, testFieldsStorePos[i], testFieldsStoreOff[i]);
+				InitBlock(enclosingInstance);
 			}
-			
-			for (int i = 0; i < testTerms.Length; i++)
+			private void  InitBlock(TestTermVectorsReader enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTermVectorsReader enclosingInstance;
+			public TestTermVectorsReader Enclosing_Instance
 			{
-				positions[i] = new int[3];
-				for (int j = 0; j < positions[i].Length; j++)
+				get
 				{
-					// poditions are always sorted in increasing order
-					positions[i][j] = (int) (j * 10 + (new System.Random().NextDouble()) * 10);
+					return enclosingInstance;
 				}
-				offsets[i] = new TermVectorOffsetInfo[3];
-				for (int j = 0; j < offsets[i].Length; j++)
+				
+			}
+			internal System.String text;
+			internal int pos;
+			internal int startOffset;
+			internal int endOffset;
+			public virtual int CompareTo(System.Object other)
+			{
+				return pos - ((TestToken) other).pos;
+			}
+		}
+		
+		internal TestToken[] tokens;
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			/*
+			for (int i = 0; i < testFields.length; i++) {
+			fieldInfos.add(testFields[i], true, true, testFieldsStorePos[i], testFieldsStoreOff[i]);
+			}
+			*/
+			System.Random random = new System.Random();
+			System.Array.Sort(testTerms);
+			int tokenUpto = 0;
+			for (int i = 0; i < testTerms.Length; i++)
+			{
+				positions[i] = new int[TERM_FREQ];
+				offsets[i] = new TermVectorOffsetInfo[TERM_FREQ];
+				// first position must be 0
+				for (int j = 0; j < TERM_FREQ; j++)
 				{
-					// ofsets are alway sorted in increasing order
+					// positions are always sorted in increasing order
+					positions[i][j] = (int) (j * 10 + random.NextDouble() * 10);
+					// offsets are always sorted in increasing order
 					offsets[i][j] = new TermVectorOffsetInfo(j * 10, j * 10 + testTerms[i].Length);
+					TestToken token = tokens[tokenUpto++] = new TestToken(this);
+					token.text = testTerms[i];
+					token.pos = positions[i][j];
+					token.startOffset = offsets[i][j].GetStartOffset();
+					token.endOffset = offsets[i][j].GetEndOffset();
 				}
 			}
-			System.Array.Sort(testTerms);
+			System.Array.Sort(tokens);
+			
+			IndexWriter writer = new IndexWriter(dir, new MyAnalyzer(this), true);
+			writer.SetUseCompoundFile(false);
+			Document doc = new Document();
+			for (int i = 0; i < testFields.Length; i++)
+			{
+				Field.TermVector tv;
+				if (testFieldsStorePos[i] && testFieldsStoreOff[i])
+					tv = Field.TermVector.WITH_POSITIONS_OFFSETS;
+				else if (testFieldsStorePos[i] && !testFieldsStoreOff[i])
+					tv = Field.TermVector.WITH_POSITIONS;
+				else if (!testFieldsStorePos[i] && testFieldsStoreOff[i])
+					tv = Field.TermVector.WITH_OFFSETS;
+				else
+					tv = Field.TermVector.YES;
+				doc.Add(new Field(testFields[i], "", Field.Store.NO, Field.Index.TOKENIZED, tv));
+			}
+			
+			//Create 5 documents for testing, they all have the same
+			//terms
 			for (int j = 0; j < 5; j++)
+				writer.AddDocument(doc);
+			writer.Flush();
+			seg = writer.NewestSegment().name;
+			writer.Close();
+			
+			fieldInfos = new FieldInfos(dir, seg + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
+		}
+		
+		private class MyTokenStream : TokenStream
+		{
+			public MyTokenStream(TestTermVectorsReader enclosingInstance)
 			{
-				writer = new TermVectorsWriter(dir, seg, fieldInfos);
-				writer.OpenDocument();
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestTermVectorsReader enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTermVectorsReader enclosingInstance;
+			public TestTermVectorsReader Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
 				
-				for (int k = 0; k < testFields.Length; k++)
+			}
+			internal int tokenUpto;
+			public override Token Next()
+			{
+				if (tokenUpto >= Enclosing_Instance.tokens.Length)
+					return null;
+				else
 				{
-					writer.OpenField(testFields[k]);
-					for (int i = 0; i < testTerms.Length; i++)
-					{
-						writer.AddTerm(testTerms[i], 3, positions[i], offsets[i]);
-					}
-					writer.CloseField();
+					Token t = new Token();
+					TestToken testToken = Enclosing_Instance.tokens[tokenUpto++];
+					t.SetTermText(testToken.text);
+					if (tokenUpto > 1)
+						t.SetPositionIncrement(testToken.pos - Enclosing_Instance.tokens[tokenUpto - 2].pos);
+					else
+						t.SetPositionIncrement(testToken.pos + 1);
+					t.SetStartOffset(testToken.startOffset);
+					t.SetEndOffset(testToken.endOffset);
+					return t;
 				}
-				writer.CloseDocument();
-				writer.Close();
 			}
 		}
 		
-		[TearDown]
-        public virtual void  TearDown()
+		private class MyAnalyzer : Analyzer
 		{
-			
+			public MyAnalyzer(TestTermVectorsReader enclosingInstance)
+			{
+				InitBlock(enclosingInstance);
+			}
+			private void  InitBlock(TestTermVectorsReader enclosingInstance)
+			{
+				this.enclosingInstance = enclosingInstance;
+			}
+			private TestTermVectorsReader enclosingInstance;
+			public TestTermVectorsReader Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+			{
+				return new MyTokenStream(enclosingInstance);
+			}
 		}
 		
 		[Test]
-        public virtual void  Test()
+		public virtual void  Test()
 		{
 			//Check to see the files were created properly in setup
-			Assert.IsTrue(writer.IsDocumentOpen() == false);
-			Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvxExtension));
-			Assert.IsTrue(dir.FileExists(seg + TermVectorsWriter.TvxExtension));
+			Assert.IsTrue(dir.FileExists(seg + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+			Assert.IsTrue(dir.FileExists(seg + "." + IndexFileNames.VECTORS_INDEX_EXTENSION));
 		}
 		
 		[Test]
-        public virtual void  TestReader()
+		public virtual void  TestReader()
 		{
 			TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
 			Assert.IsTrue(reader != null);
-			TermFreqVector vector = reader.Get(0, testFields[0]);
-			Assert.IsTrue(vector != null);
-			System.String[] terms = vector.GetTerms();
-			Assert.IsTrue(terms != null);
-			Assert.IsTrue(terms.Length == testTerms.Length);
-			for (int i = 0; i < terms.Length; i++)
+			for (int j = 0; j < 5; j++)
 			{
-				System.String term = terms[i];
-				//System.out.println("Term: " + term);
-				Assert.IsTrue(term.Equals(testTerms[i]));
+				TermFreqVector vector = reader.Get(j, testFields[0]);
+				Assert.IsTrue(vector != null);
+				System.String[] terms = vector.GetTerms();
+				Assert.IsTrue(terms != null);
+				Assert.IsTrue(terms.Length == testTerms.Length);
+				for (int i = 0; i < terms.Length; i++)
+				{
+					System.String term = terms[i];
+					//System.out.println("Term: " + term);
+					Assert.IsTrue(term.Equals(testTerms[i]));
+				}
 			}
 		}
 		
 		[Test]
-        public virtual void  TestPositionReader()
+		public virtual void  TestPositionReader()
 		{
 			TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
 			Assert.IsTrue(reader != null);
@@ -179,7 +291,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestOffsetReader()
+		public virtual void  TestOffsetReader()
 		{
 			TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
 			Assert.IsTrue(reader != null);
@@ -212,10 +324,139 @@
 			}
 		}
 		
+		[Test]
+		public virtual void  TestMapper()
+		{
+			TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+			Assert.IsTrue(reader != null);
+			SortedTermVectorMapper mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
+			reader.Get(0, mapper);
+			System.Collections.Generic.SortedDictionary<Object,Object> set_Renamed = mapper.GetTermVectorEntrySet();
+			Assert.IsTrue(set_Renamed != null, "set is null and it shouldn't be");
+			//four fields, 4 terms, all terms are the same
+			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
+			//Check offsets and positions
+			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
+			{
+				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
+				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
+				Assert.IsTrue(tve.GetOffsets() != null, "tve.getOffsets() is null and it shouldn't be");
+				Assert.IsTrue(tve.GetPositions() != null, "tve.getPositions() is null and it shouldn't be");
+			}
+			
+			mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
+			reader.Get(1, mapper);
+			set_Renamed = mapper.GetTermVectorEntrySet();
+			Assert.IsTrue(set_Renamed != null, "set is null and it shouldn't be");
+			//four fields, 4 terms, all terms are the same
+			Assert.IsTrue(set_Renamed.Count == 4, "set Size: " + set_Renamed.Count + " is not: " + 4);
+			//Should have offsets and positions b/c we are munging all the fields together
+			for (System.Collections.IEnumerator iterator = set_Renamed.Keys.GetEnumerator(); iterator.MoveNext(); )
+			{
+				TermVectorEntry tve = (TermVectorEntry) iterator.Current;
+				Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
+				Assert.IsTrue(tve.GetOffsets() != null, "tve.getOffsets() is null and it shouldn't be");
+				Assert.IsTrue(tve.GetPositions() != null, "tve.getPositions() is null and it shouldn't be");
+			}
+			
+			
+			FieldSortedTermVectorMapper fsMapper = new FieldSortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
+			reader.Get(0, fsMapper);
+			System.Collections.IDictionary map = fsMapper.GetFieldToTerms();
+			Assert.IsTrue(map.Count == testFields.Length, "map Size: " + map.Count + " is not: " + testFields.Length);
+			for (System.Collections.IEnumerator iterator = new System.Collections.Hashtable(map).GetEnumerator(); iterator.MoveNext(); )
+			{
+				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
+				System.Collections.Generic.SortedDictionary<Object, Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object, Object>)entry.Value;
+				Assert.IsTrue(sortedSet.Count == 4, "sortedSet Size: " + sortedSet.Count + " is not: " + 4);
+				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
+				{
+					TermVectorEntry tve = (TermVectorEntry) inner.Current;
+					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
+					//Check offsets and positions.
+					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
+					System.String field = tve.GetField();
+					if (field.Equals(testFields[0]))
+					{
+						//should have offsets
+						
+						Assert.IsTrue(tve.GetOffsets() != null, "tve.getOffsets() is null and it shouldn't be");
+						Assert.IsTrue(tve.GetPositions() != null, "tve.getPositions() is null and it shouldn't be");
+					}
+					else if (field.Equals(testFields[1]))
+					{
+						//should not have offsets
+						
+						Assert.IsTrue(tve.GetOffsets() == null, "tve.getOffsets() is not null and it shouldn't be");
+						Assert.IsTrue(tve.GetPositions() == null, "tve.getPositions() is not null and it shouldn't be");
+					}
+				}
+			}
+			//Try mapper that ignores offs and positions
+			fsMapper = new FieldSortedTermVectorMapper(true, true, new TermVectorEntryFreqSortedComparator());
+			reader.Get(0, fsMapper);
+			map = fsMapper.GetFieldToTerms();
+			Assert.IsTrue(map.Count == testFields.Length, "map Size: " + map.Count + " is not: " + testFields.Length);
+			for (System.Collections.IEnumerator iterator = new System.Collections.Hashtable(map).GetEnumerator(); iterator.MoveNext(); )
+			{
+				System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iterator.Current;
+				System.Collections.Generic.SortedDictionary<Object, Object> sortedSet = (System.Collections.Generic.SortedDictionary<Object, Object>)entry.Value;
+				Assert.IsTrue(sortedSet.Count == 4, "sortedSet Size: " + sortedSet.Count + " is not: " + 4);
+				for (System.Collections.IEnumerator inner = sortedSet.Keys.GetEnumerator(); inner.MoveNext(); )
+				{
+					TermVectorEntry tve = (TermVectorEntry) inner.Current;
+					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
+					//Check offsets and positions.
+					Assert.IsTrue(tve != null, "tve is null and it shouldn't be");
+					System.String field = tve.GetField();
+					if (field.Equals(testFields[0]))
+					{
+						//offsets and positions should have been ignored
+						
+						Assert.IsTrue(tve.GetOffsets() == null, "tve.getOffsets() is not null and it shouldn't be");
+						Assert.IsTrue(tve.GetPositions() == null, "tve.getPositions() is not null and it shouldn't be");
+					}
+					else if (field.Equals(testFields[1]))
+					{
+						//should not have offsets
+						
+						Assert.IsTrue(tve.GetOffsets() == null, "tve.getOffsets() is not null and it shouldn't be");
+						Assert.IsTrue(tve.GetPositions() == null, "tve.getPositions() is not null and it shouldn't be");
+					}
+				}
+			}
+			
+			// test setDocumentNumber()
+			IndexReader ir = IndexReader.Open(dir);
+			DocNumAwareMapper docNumAwareMapper = new DocNumAwareMapper();
+			Assert.AreEqual(- 1, docNumAwareMapper.GetDocumentNumber());
+			
+			ir.GetTermFreqVector(0, docNumAwareMapper);
+			Assert.AreEqual(0, docNumAwareMapper.GetDocumentNumber());
+			docNumAwareMapper.SetDocumentNumber(- 1);
+			
+			ir.GetTermFreqVector(1, docNumAwareMapper);
+			Assert.AreEqual(1, docNumAwareMapper.GetDocumentNumber());
+			docNumAwareMapper.SetDocumentNumber(- 1);
+			
+			ir.GetTermFreqVector(0, "f1", docNumAwareMapper);
+			Assert.AreEqual(0, docNumAwareMapper.GetDocumentNumber());
+			docNumAwareMapper.SetDocumentNumber(- 1);
+			
+			ir.GetTermFreqVector(1, "f2", docNumAwareMapper);
+			Assert.AreEqual(1, docNumAwareMapper.GetDocumentNumber());
+			docNumAwareMapper.SetDocumentNumber(- 1);
+			
+			ir.GetTermFreqVector(0, "f1", docNumAwareMapper);
+			Assert.AreEqual(0, docNumAwareMapper.GetDocumentNumber());
+			
+			ir.Close();
+		}
+		
 		
 		/// <summary> Make sure exceptions and bad params are handled appropriately</summary>
 		[Test]
-        public virtual void  TestBadParams()
+		public virtual void  TestBadParams()
 		{
 			try
 			{
@@ -225,7 +466,7 @@
 				reader.Get(50, testFields[0]);
 				Assert.Fail();
 			}
-			catch (System.IO.IOException e)
+			catch (System.IO.IOException)
 			{
 				// expected exception
 			}
@@ -237,7 +478,7 @@
 				reader.Get(50);
 				Assert.Fail();
 			}
-			catch (System.IO.IOException e)
+			catch (System.IO.IOException)
 			{
 				// expected exception
 			}
@@ -249,10 +490,47 @@
 				TermFreqVector vector = reader.Get(0, "f50");
 				Assert.IsTrue(vector == null);
 			}
-			catch (System.IO.IOException e)
+			catch (System.IO.IOException)
 			{
 				Assert.Fail();
 			}
 		}
+		
+		
+		public class DocNumAwareMapper : TermVectorMapper
+		{
+			
+			public DocNumAwareMapper()
+			{
+			}
+			
+			private int documentNumber = - 1;
+			
+			public override void  SetExpectations(System.String field, int numTerms, bool storeOffsets, bool storePositions)
+			{
+				if (documentNumber == - 1)
+				{
+					throw new System.SystemException("Document number should be set at this point!");
+				}
+			}
+			
+			public override void  Map(System.String term, int frequency, TermVectorOffsetInfo[] offsets, int[] positions)
+			{
+				if (documentNumber == - 1)
+				{
+					throw new System.SystemException("Document number should be set at this point!");
+				}
+			}
+			
+			public virtual int GetDocumentNumber()
+			{
+				return documentNumber;
+			}
+			
+			public override void  SetDocumentNumber(int documentNumber)
+			{
+				this.documentNumber = documentNumber;
+			}
+		}
 	}
 }
\ No newline at end of file
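
A note on the position arithmetic in MyTokenStream above: Lucene stores a position increment per token rather than an absolute position, so the stream emits pos + 1 for the first token (the stream conceptually starts at position -1) and the delta from the previous token thereafter. A small sketch of the inverse mapping (illustrative only, not from the commit) shows the two representations agree:

    // Recover absolute positions from the increments MyTokenStream emits:
    // a token's absolute position is the running sum of increments minus one.
    public static int[] ToAbsolutePositions(int[] increments)
    {
        int[] positions = new int[increments.Length];
        int pos = -1;  // position before the first token
        for (int i = 0; i < increments.Length; i++)
        {
            pos += increments[i];
            positions[i] = pos;
        }
        return positions;
    }
    // Example: increments {3, 2} yield absolute positions {2, 4}, matching
    // SetPositionIncrement(pos + 1) for the first token and (pos - prevPos) after.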

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermVectorsWriter.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsWriter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsWriter.cs Tue Jul 15 14:44:04 2008
@@ -14,7 +14,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+// {{Aroush-2.3.1}} remove this file from SVN
+/*
 using System;
 
 using NUnit.Framework;
@@ -101,7 +102,7 @@
 		e.printStackTrace();
 		Assert.IsTrue(false);
 		}
-		}  */
+		}  *
 		
 		[Test]
         public virtual void  TestWriter()
@@ -229,4 +230,5 @@
 			ir.Close();
 		}
 	}
-}
\ No newline at end of file
+}
+*/
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestTermdocPerf.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs Tue Jul 15 14:44:04 2008
@@ -19,13 +19,14 @@
 
 using NUnit.Framework;
 
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
 using Token = Lucene.Net.Analysis.Token;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
 
 namespace Lucene.Net.Index
 {
@@ -34,6 +35,7 @@
 	/// </author>
 	/// <version>  $Id$
 	/// </version>
+	
 	class RepeatingTokenStream : TokenStream
 	{
 		public int num;
@@ -51,7 +53,7 @@
 	}
 	
 	[TestFixture]
-	public class TestTermdocPerf
+	public class TestTermdocPerf : LuceneTestCase
 	{
 		private class AnonymousClassAnalyzer:Analyzer
 		{
@@ -144,7 +146,7 @@
 			return ret;
 		}
 		
-        [Test]
+		[Test]
 		public virtual void  TestTermDocPerf()
 		{
 			// performance test for 10% of documents containing a term

Added: incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestThreadedOptimize.cs?rev=677059&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestThreadedOptimize.cs Tue Jul 15 14:44:04 2008
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+
+using NUnit.Framework;
+
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Directory = Lucene.Net.Store.Directory;
+using FSDirectory = Lucene.Net.Store.FSDirectory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using English = Lucene.Net.Util.English;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using _TestUtil = Lucene.Net.Util._TestUtil;
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+
+namespace Lucene.Net.Index
+{
+	
+	[TestFixture]
+	public class TestThreadedOptimize : LuceneTestCase
+	{
+		private class AnonymousClassThread : SupportClass.ThreadClass
+		{
+			public AnonymousClassThread(Lucene.Net.Index.IndexWriter writerFinal, int iFinal, int iterFinal, TestThreadedOptimize enclosingInstance)
+			{
+				InitBlock(writerFinal, iFinal, iterFinal, enclosingInstance);
+			}
+			private void  InitBlock(Lucene.Net.Index.IndexWriter writerFinal, int iFinal, int iterFinal, TestThreadedOptimize enclosingInstance)
+			{
+				this.writerFinal = writerFinal;
+				this.iFinal = iFinal;
+				this.iterFinal = iterFinal;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private Lucene.Net.Index.IndexWriter writerFinal;
+			private int iFinal;
+			private int iterFinal;
+			private TestThreadedOptimize enclosingInstance;
+			public TestThreadedOptimize Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			override public void  Run()
+			{
+				try
+				{
+					for (int j = 0; j < Lucene.Net.Index.TestThreadedOptimize.NUM_ITER2; j++)
+					{
+						writerFinal.Optimize(false);
+						for (int k = 0; k < 17 * (1 + iFinal); k++)
+						{
+							Document d = new Document();
+							d.Add(new Field("id", iterFinal + "_" + iFinal + "_" + j + "_" + k, Field.Store.YES, Field.Index.UN_TOKENIZED));
+							d.Add(new Field("contents", English.IntToEnglish(iFinal + k), Field.Store.NO, Field.Index.TOKENIZED));
+							writerFinal.AddDocument(d);
+						}
+						for (int k = 0; k < 9 * (1 + iFinal); k++)
+							writerFinal.DeleteDocuments(new Term("id", iterFinal + "_" + iFinal + "_" + j + "_" + k));
+						writerFinal.Optimize();
+					}
+				}
+				catch (System.Exception t)
+				{
+					Enclosing_Instance.SetFailed();
+					System.Console.Out.WriteLine(SupportClass.ThreadClass.Current().Name + ": hit exception");
+					System.Console.Out.WriteLine(t.StackTrace);
+				}
+			}
+		}
+		
+		private static readonly Analyzer ANALYZER = new SimpleAnalyzer();
+		
+		private const int NUM_THREADS = 3;
+		//private final static int NUM_THREADS = 5;
+		
+		private const int NUM_ITER = 2;
+		//private final static int NUM_ITER = 10;
+		
+		private const int NUM_ITER2 = 2;
+		//private final static int NUM_ITER2 = 5;
+		
+		private bool failed;
+		
+		private void  SetFailed()
+		{
+			failed = true;
+		}
+		
+		public virtual void  RunTest(Directory directory, bool autoCommit, MergeScheduler merger)
+		{
+			
+			IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
+			writer.SetMaxBufferedDocs(2);
+			if (merger != null)
+				writer.SetMergeScheduler(merger);
+			
+			for (int iter = 0; iter < NUM_ITER; iter++)
+			{
+				int iterFinal = iter;
+				
+				writer.SetMergeFactor(1000);
+				
+				for (int i = 0; i < 200; i++)
+				{
+					Document d = new Document();
+					d.Add(new Field("id", System.Convert.ToString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+					d.Add(new Field("contents", English.IntToEnglish(i), Field.Store.NO, Field.Index.TOKENIZED));
+					writer.AddDocument(d);
+				}
+				
+				writer.SetMergeFactor(4);
+				//writer.setInfoStream(System.out);
+				
+				int docCount = writer.DocCount();
+				
+				SupportClass.ThreadClass[] threads = new SupportClass.ThreadClass[NUM_THREADS];
+				
+				for (int i = 0; i < NUM_THREADS; i++)
+				{
+					int iFinal = i;
+					IndexWriter writerFinal = writer;
+					threads[i] = new AnonymousClassThread(writerFinal, iFinal, iterFinal, this);
+				}
+				
+				for (int i = 0; i < NUM_THREADS; i++)
+					threads[i].Start();
+				
+				for (int i = 0; i < NUM_THREADS; i++)
+					threads[i].Join();
+				
+				Assert.IsTrue(!failed);
+				
+				int expectedDocCount = (int) ((1 + iter) * (200 + 8 * NUM_ITER2 * (NUM_THREADS / 2.0) * (1 + NUM_THREADS)));
+				
+				// System.out.println("TEST: now index=" + writer.segString());
+				
+				Assert.AreEqual(expectedDocCount, writer.DocCount());
+				
+				if (!autoCommit)
+				{
+					writer.Close();
+					writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
+					writer.SetMaxBufferedDocs(2);
+				}
+				
+				IndexReader reader = IndexReader.Open(directory);
+				Assert.IsTrue(reader.IsOptimized());
+				Assert.AreEqual(expectedDocCount, reader.NumDocs());
+				reader.Close();
+			}
+			writer.Close();
+		}
+		
+		/*
+		Run above stress test against RAMDirectory and then
+		FSDirectory.
+		*/
+		[Test]
+		public virtual void  TestThreadedOptimize_Renamed_Method()
+		{
+			Directory directory = new MockRAMDirectory();
+			RunTest(directory, false, null);
+			RunTest(directory, true, null);
+			RunTest(directory, false, new ConcurrentMergeScheduler());
+			RunTest(directory, true, new ConcurrentMergeScheduler());
+			directory.Close();
+
+			System.String tempDir = SupportClass.AppSettings.Get("tempDir", "");
+			if (tempDir == null || tempDir.Length == 0)
+				throw new System.IO.IOException("tempDir undefined, cannot run test");
+			
+			System.String dirName = tempDir + "/luceneTestThreadedOptimize";
+			directory = FSDirectory.GetDirectory(dirName);
+			RunTest(directory, false, null);
+			RunTest(directory, true, null);
+			RunTest(directory, false, new ConcurrentMergeScheduler());
+			RunTest(directory, true, new ConcurrentMergeScheduler());
+			directory.Close();
+			_TestUtil.RmDir(dirName);
+		}
+	}
+}
\ No newline at end of file
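
The expectedDocCount expression in RunTest above compresses a per-thread tally into closed form: each outer iteration adds 200 documents from the main thread, while thread i nets NUM_ITER2 * (17 - 9) * (1 + i) documents (17 * (1 + i) added and 9 * (1 + i) deleted per inner pass); summing 8 * NUM_ITER2 * (1 + i) over i = 0..NUM_THREADS-1 gives 8 * NUM_ITER2 * (NUM_THREADS / 2.0) * (1 + NUM_THREADS), and the (1 + iter) factor reflects documents accumulating across outer iterations. An equivalent, more literal tally (a sketch, not part of the test):

    public static int ExpectedDocCount(int iter, int numThreads, int numIter2)
    {
        int perIteration = 200;  // documents the main thread adds each outer pass
        for (int i = 0; i < numThreads; i++)
        {
            // thread i: numIter2 inner passes, each netting (17 - 9) * (1 + i) docs
            perIteration += numIter2 * 8 * (1 + i);
        }
        return (1 + iter) * perIteration;  // counts accumulate across outer passes
    }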

Modified: incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/TestWordlistLoader.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Index/TestWordlistLoader.cs Tue Jul 15 14:44:04 2008
@@ -20,11 +20,12 @@
 using NUnit.Framework;
 
 using WordlistLoader = Lucene.Net.Analysis.WordlistLoader;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Index
 {
 	[TestFixture]
-	public class TestWordlistLoader
+	public class TestWordlistLoader : LuceneTestCase
 	{
 		[Test]
 		public virtual void  TestWordlistLoading()

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.presharedstores.cfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.presharedstores.cfs.zip?rev=677059&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.presharedstores.cfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: incubator/lucene.net/trunk/C#/src/Test/Index/index.presharedstores.nocfs.zip
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Index/index.presharedstores.nocfs.zip?rev=677059&view=auto
==============================================================================
Binary file - no diff available.

Propchange: incubator/lucene.net/trunk/C#/src/Test/Index/index.presharedstores.nocfs.zip
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiAnalyzer.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiAnalyzer.cs Tue Jul 15 14:44:04 2008
@@ -19,15 +19,16 @@
 
 using NUnit.Framework;
 
-using Query = Lucene.Net.Search.Query;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using LowerCaseFilter = Lucene.Net.Analysis.LowerCaseFilter;
 using Token = Lucene.Net.Analysis.Token;
 using TokenFilter = Lucene.Net.Analysis.TokenFilter;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using StandardTokenizer = Lucene.Net.Analysis.Standard.StandardTokenizer;
+using Query = Lucene.Net.Search.Query;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
-namespace Lucene.Net.QueryParser
+namespace Lucene.Net.QueryParsers
 {
 	
 	/// <summary> Test QueryParser's ability to deal with Analyzers that return more
@@ -38,13 +39,13 @@
 	/// <author>  Daniel Naber
 	/// </author>
 	[TestFixture]
-    public class TestMultiAnalyzer
+	public class TestMultiAnalyzer : LuceneTestCase
 	{
 		
 		private static int multiToken = 0;
 		
 		[Test]
-        public virtual void  TestMultiAnalyzer_Renamed_Method()
+		public virtual void  TestMultiAnalyzer_Renamed_Method()
 		{
 			
 			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("", new MultiAnalyzer(this));
@@ -82,36 +83,36 @@
 			// phrase with non-default boost:
 			Assert.AreEqual("\"(multi multi2) foo\"^2.0", qp.Parse("\"multi foo\"^2").ToString());
 			
-            // phrase after changing default slop
-            qp.SetPhraseSlop(99);
-            Assert.AreEqual("\"(multi multi2) foo\"~99 bar", qp.Parse("\"multi foo\" bar").ToString());
-            Assert.AreEqual("\"(multi multi2) foo\"~99 \"foo bar\"~2", qp.Parse("\"multi foo\" \"foo bar\"~2").ToString());
-            qp.SetPhraseSlop(0);
+			// phrase after changing default slop
+			qp.SetPhraseSlop(99);
+			Assert.AreEqual("\"(multi multi2) foo\"~99 bar", qp.Parse("\"multi foo\" bar").ToString());
+			Assert.AreEqual("\"(multi multi2) foo\"~99 \"foo bar\"~2", qp.Parse("\"multi foo\" \"foo bar\"~2").ToString());
+			qp.SetPhraseSlop(0);
 			
-            // non-default operator:
+			// non-default operator:
 			qp.SetDefaultOperator(Lucene.Net.QueryParsers.QueryParser.AND_OPERATOR);
 			Assert.AreEqual("+(multi multi2) +foo", qp.Parse("multi foo").ToString());
 		}
 		
-        [Test]
-        public virtual void  TestMultiAnalyzerWithSubclassOfQueryParser()
-        {
-			
-            DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer(this));
-            qp.SetPhraseSlop(99); // modified default slop
-			
-            // direct call to (super's) getFieldQuery to demonstrate differnce
-            // between phrase and multiphrase with modified default slop
-            Assert.AreEqual("\"foo bar\"~99", qp.GetSuperFieldQuery("", "foo bar").ToString());
-            Assert.AreEqual("\"(multi multi2) bar\"~99", qp.GetSuperFieldQuery("", "multi bar").ToString());
-			
-			
-            // ask sublcass to parse phrase with modified default slop
-            Assert.AreEqual("\"(multi multi2) foo\"~99 bar", qp.Parse("\"multi foo\" bar").ToString());
-        }
+		[Test]
+		public virtual void  TestMultiAnalyzerWithSubclassOfQueryParser()
+		{
+			
+			DumbQueryParser qp = new DumbQueryParser("", new MultiAnalyzer(this));
+			qp.SetPhraseSlop(99); // modified default slop
+			
+			// direct call to (super's) GetFieldQuery to demonstrate the difference
+			// between phrase and multiphrase with modified default slop
+			Assert.AreEqual("\"foo bar\"~99", qp.GetSuperFieldQuery("", "foo bar").ToString());
+			Assert.AreEqual("\"(multi multi2) bar\"~99", qp.GetSuperFieldQuery("", "multi bar").ToString());
+			
+			
+			// ask subclass to parse phrase with modified default slop
+			Assert.AreEqual("\"(multi multi2) foo\"~99 bar", qp.Parse("\"multi foo\" bar").ToString());
+		}
 		
-        [Test]
-        public virtual void  TestPosIncrementAnalyzer()
+		[Test]
+		public virtual void  TestPosIncrementAnalyzer()
 		{
 			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("", new PosIncrementAnalyzer(this));
 			Assert.AreEqual("quick brown", qp.Parse("the quick brown").ToString());
@@ -131,7 +132,7 @@
 			}
 			private TestMultiAnalyzer enclosingInstance;
 			
-            public TestMultiAnalyzer Enclosing_Instance
+			public TestMultiAnalyzer Enclosing_Instance
 			{
 				get
 				{
@@ -172,18 +173,18 @@
 			
 			private Lucene.Net.Analysis.Token prevToken;
 			
-            public TestFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
+			public TestFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
 			{
 				InitBlock(enclosingInstance);
 			}
 			
 			public override Lucene.Net.Analysis.Token Next()
 			{
-				if (Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken > 0)
+				if (Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken > 0)
 				{
-					Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token("multi" + (Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken + 1), prevToken.StartOffset(), prevToken.EndOffset(), prevToken.Type());
+					Lucene.Net.Analysis.Token token = new Lucene.Net.Analysis.Token("multi" + (Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken + 1), prevToken.StartOffset(), prevToken.EndOffset(), prevToken.Type());
 					token.SetPositionIncrement(0);
-					Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken--;
+					Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken--;
 					return token;
 				}
 				else
@@ -195,12 +196,12 @@
 					System.String text = t.TermText();
 					if (text.Equals("triplemulti"))
 					{
-						Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken = 2;
+						Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken = 2;
 						return t;
 					}
 					else if (text.Equals("multi"))
 					{
-						Lucene.Net.QueryParser.TestMultiAnalyzer.multiToken = 1;
+						Lucene.Net.QueryParsers.TestMultiAnalyzer.multiToken = 1;
 						return t;
 					}
 					else
@@ -260,14 +261,14 @@
 				
 			}
 			
-            public TestPosIncrementFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
+			public TestPosIncrementFilter(TestMultiAnalyzer enclosingInstance, TokenStream in_Renamed) : base(in_Renamed)
 			{
 				InitBlock(enclosingInstance);
 			}
 			
 			public override Lucene.Net.Analysis.Token Next()
 			{
-				for (Token t = input.Next(); t != null; t = input.Next())
+				for (Lucene.Net.Analysis.Token t = input.Next(); t != null; t = input.Next())
 				{
 					if (t.TermText().Equals("the"))
 					{
@@ -290,46 +291,46 @@
 			}
 		}
 		
-        /// <summary>a very simple subclass of QueryParser </summary>
-        public class DumbQueryParser : Lucene.Net.QueryParsers.QueryParser
-        {
-			
-            public DumbQueryParser(System.String f, Analyzer a):base(f, a)
-            {
-            }
-			
-            /// <summary>expose super's version </summary>
-            public Lucene.Net.Search.Query GetSuperFieldQuery(System.String f, System.String t)
-            {
-                return base.GetFieldQuery(f, t);
-            }
-            /// <summary>wrap super's version </summary>
-            protected internal virtual Lucene.Net.Search.Query GetFieldQuery(System.String f, System.String t)
-            {
-                return new DumbQueryWrapper(GetSuperFieldQuery(f, t));
-            }
-        }
+		/// <summary>a very simple subclass of QueryParser </summary>
+		public class DumbQueryParser : Lucene.Net.QueryParsers.QueryParser
+		{
+			
+			public DumbQueryParser(System.String f, Analyzer a):base(f, a)
+			{
+			}
+			
+			/// <summary>expose super's version </summary>
+			public Lucene.Net.Search.Query GetSuperFieldQuery(System.String f, System.String t)
+			{
+				return base.GetFieldQuery(f, t);
+			}
+			/// <summary>wrap super's version </summary>
+			public override Lucene.Net.Search.Query GetFieldQuery(System.String f, System.String t)
+			{
+				return new DumbQueryWrapper(GetSuperFieldQuery(f, t));
+			}
+		}
 		
-        /// <summary> A very simple wrapper to prevent instanceof checks but uses
-        /// the toString of the query it wraps.
-        /// </summary>
-        [Serializable]
-        private sealed class DumbQueryWrapper : Lucene.Net.Search.Query
-        {
-			
-            private Lucene.Net.Search.Query q;
-            public DumbQueryWrapper(Lucene.Net.Search.Query q):base()
-            {
-                this.q = q;
-            }
-            public override System.String ToString(System.String f)
-            {
-                return q.ToString(f);
-            }
-            override public System.Object Clone()
-            {
-                return null;
-            }
-        }
-    }
+		/// <summary> A very simple wrapper to prevent instanceof checks but uses
+		/// the toString of the query it wraps.
+		/// </summary>
+		[Serializable]
+		private sealed class DumbQueryWrapper : Lucene.Net.Search.Query
+		{
+			
+			private Lucene.Net.Search.Query q;
+			public DumbQueryWrapper(Lucene.Net.Search.Query q):base()
+			{
+				this.q = q;
+			}
+			public override System.String ToString(System.String f)
+			{
+				return q.ToString(f);
+			}
+			override public System.Object Clone()
+			{
+				return null;
+			}
+		}
+	}
 }
\ No newline at end of file
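
One substantive change above, amid the re-indentation: DumbQueryParser.GetFieldQuery moves from protected internal virtual to public override. The old declaration introduced a new method slot that merely hid the base QueryParser.GetFieldQuery, so the parser's virtual dispatch never reached DumbQueryWrapper; overriding fixes that. A minimal sketch of the distinction (hypothetical types, not from the commit):

    class Base     { public virtual string Hook() { return "base"; } }
    class Hiding   : Base { public new virtual string Hook() { return "hiding"; } }
    class Deriving : Base { public override string Hook() { return "override"; } }

    // Through a Base-typed reference, only the override is reachable:
    //   ((Base) new Hiding()).Hook()    returns "base"
    //   ((Base) new Deriving()).Hook()  returns "override"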

Modified: incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/QueryParser/TestMultiFieldQueryParser.cs?rev=677059&r1=677058&r2=677059&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/QueryParser/TestMultiFieldQueryParser.cs Tue Jul 15 14:44:04 2008
@@ -19,30 +19,63 @@
 
 using NUnit.Framework;
 
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using Token = Lucene.Net.Analysis.Token;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
-using Document = Lucene.Net.Documents.Document;
-using Field = Lucene.Net.Documents.Field;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
 using BooleanClause = Lucene.Net.Search.BooleanClause;
+using BooleanQuery = Lucene.Net.Search.BooleanQuery;
 using Hits = Lucene.Net.Search.Hits;
 using IndexSearcher = Lucene.Net.Search.IndexSearcher;
 using Query = Lucene.Net.Search.Query;
-using Directory = Lucene.Net.Store.Directory;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using MultiFieldQueryParser = Lucene.Net.QueryParsers.MultiFieldQueryParser;
+using Occur = Lucene.Net.Search.BooleanClause.Occur;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
-namespace Lucene.Net.QueryParser
+namespace Lucene.Net.QueryParsers
 {
 	
 	/// <summary> Tests QueryParser.</summary>
 	/// <author>  Daniel Naber
 	/// </author>
 	[TestFixture]
-    public class TestMultiFieldQueryParser
+	public class TestMultiFieldQueryParser : LuceneTestCase
 	{
+		
+		/// <summary>test stop word parsing for both the non-static form, and for the
+		/// corresponding static form (qtxt, fields[]). 
+		/// </summary>
+		[Test]
+		public virtual void  TestStopwordsParsing()
+		{
+			AssertStopQueryEquals("one", "b:one t:one");
+			AssertStopQueryEquals("one stop", "b:one t:one");
+			AssertStopQueryEquals("one (stop)", "b:one t:one");
+			AssertStopQueryEquals("one ((stop))", "b:one t:one");
+			AssertStopQueryEquals("stop", "");
+			AssertStopQueryEquals("(stop)", "");
+			AssertStopQueryEquals("((stop))", "");
+		}
+		
+		// verify parsing of query using a stopping analyzer  
+		private void  AssertStopQueryEquals(System.String qtxt, System.String expectedRes)
+		{
+			System.String[] fields = new System.String[]{"b", "t"};
+			Occur[] occur = new Occur[]{Occur.SHOULD, Occur.SHOULD};
+			TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();
+			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, a);
+			
+			Query q = mfqp.Parse(qtxt);
+			Assert.AreEqual(expectedRes, q.ToString());
+			
+			q = MultiFieldQueryParser.Parse(qtxt, fields, occur, a);
+			Assert.AreEqual(expectedRes, q.ToString());
+		}
+		
 		[Test]
 		public virtual void  TestSimple()
 		{
@@ -58,7 +91,7 @@
 			q = mfqp.Parse("+one +two");
 			Assert.AreEqual("+(b:one t:one) +(b:two t:two)", q.ToString());
 			
-			q = mfqp.Parse("+one -two -three)");
+			q = mfqp.Parse("+one -two -three");
 			Assert.AreEqual("+(b:one t:one) -(b:two t:two) -(b:three t:three)", q.ToString());
 			
 			q = mfqp.Parse("one^2 two");
@@ -100,38 +133,38 @@
 			Assert.AreEqual("+(b:\"aa bb cc\" t:\"aa bb cc\") +(b:\"dd ee\" t:\"dd ee\")", q.ToString());
 		}
 		
-        [Test]
-        public virtual void  TestBoostsSimple()
-        {
-            System.Collections.IDictionary boosts = new System.Collections.Hashtable();
-            boosts["b"] = (float) 5;
-            boosts["t"] = (float) 10;
-            System.String[] fields = new System.String[]{"b", "t"};
-            MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer(), boosts);
-			
-			
-            //Check for simple
-            Query q = mfqp.Parse("one");
-            Assert.AreEqual("b:one^5.0 t:one^10.0", q.ToString());
-			
-            //Check for AND
-            q = mfqp.Parse("one AND two");
-            Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.ToString());
-			
-            //Check for OR
-            q = mfqp.Parse("one OR two");
-            Assert.AreEqual("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.ToString());
-			
-            //Check for AND and a field
-            q = mfqp.Parse("one AND two AND foo:test");
-            Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.ToString());
-			
-            q = mfqp.Parse("one^3 AND two^4");
-            Assert.AreEqual("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.ToString());
-        }
+		[Test]
+		public virtual void  TestBoostsSimple()
+		{
+			System.Collections.IDictionary boosts = new System.Collections.Hashtable();
+			boosts["b"] = (float) 5;
+			boosts["t"] = (float) 10;
+			System.String[] fields = new System.String[]{"b", "t"};
+			MultiFieldQueryParser mfqp = new MultiFieldQueryParser(fields, new StandardAnalyzer(), boosts);
+			
+			
+			//Check for simple
+			Query q = mfqp.Parse("one");
+			Assert.AreEqual("b:one^5.0 t:one^10.0", q.ToString());
+			
+			//Check for AND
+			q = mfqp.Parse("one AND two");
+			Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0)", q.ToString());
+			
+			//Check for OR
+			q = mfqp.Parse("one OR two");
+			Assert.AreEqual("(b:one^5.0 t:one^10.0) (b:two^5.0 t:two^10.0)", q.ToString());
+			
+			//Check for AND and a field
+			q = mfqp.Parse("one AND two AND foo:test");
+			Assert.AreEqual("+(b:one^5.0 t:one^10.0) +(b:two^5.0 t:two^10.0) +foo:test", q.ToString());
+			
+			q = mfqp.Parse("one^3 AND two^4");
+			Assert.AreEqual("+((b:one^5.0 t:one^10.0)^3.0) +((b:two^5.0 t:two^10.0)^4.0)", q.ToString());
+		}
 		
-        [Test]
-        public virtual void  TestStaticMethod1()
+		[Test]
+		public virtual void  TestStaticMethod1()
 		{
 			System.String[] fields = new System.String[]{"b", "t"};
 			System.String[] queries = new System.String[]{"one", "two"};
@@ -156,14 +189,25 @@
 				q = MultiFieldQueryParser.Parse(queries5, fields, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				// expected exception, array length differs
 			}
+			
+			// check also with stop words for this static form (qtxts[], fields[]).
+			TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();
+			
+			System.String[] queries6 = new System.String[]{"((+stop))", "+((stop))"};
+			q = MultiFieldQueryParser.Parse(queries6, fields, stopA);
+			Assert.AreEqual("", q.ToString());
+			
+			System.String[] queries7 = new System.String[]{"one ((+stop)) +more", "+((stop)) +two"};
+			q = MultiFieldQueryParser.Parse(queries7, fields, stopA);
+			Assert.AreEqual("(b:one +b:more) (+t:two)", q.ToString());
 		}
 		
 		[Test]
-        public virtual void  TestStaticMethod2()
+		public virtual void  TestStaticMethod2()
 		{
 			System.String[] fields = new System.String[]{"b", "t"};
 			BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
@@ -179,40 +223,40 @@
 				q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				// expected exception, array length differs
 			}
 		}
 		
 		[Test]
-        public virtual void  TestStaticMethod2Old()
+		public virtual void  TestStaticMethod2Old()
 		{
-            System.String[] fields = new System.String[]{"b", "t"};
-            //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
-            BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-            MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer());
-			
-            Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer()); //, fields, flags, new StandardAnalyzer());
-            Assert.AreEqual("+b:one -t:one", q.ToString());
-			
-            q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer());
-            Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());
-			
-            try
-            {
-                BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
-                q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
-                Assert.Fail();
-            }
-            catch (System.ArgumentException e)
-            {
-                // expected exception, array length differs
-            }
-        }
+			System.String[] fields = new System.String[]{"b", "t"};
+			//int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
+			BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
+			MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new StandardAnalyzer());
+			
+			Query q = MultiFieldQueryParser.Parse("one", fields, flags, new StandardAnalyzer()); //, fields, flags, new StandardAnalyzer());
+			Assert.AreEqual("+b:one -t:one", q.ToString());
+			
+			q = MultiFieldQueryParser.Parse("one two", fields, flags, new StandardAnalyzer());
+			Assert.AreEqual("+(b:one b:two) -(t:one t:two)", q.ToString());
+			
+			try
+			{
+				BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
+				q = MultiFieldQueryParser.Parse("blah", fields, flags2, new StandardAnalyzer());
+				Assert.Fail();
+			}
+			catch (System.ArgumentException)
+			{
+				// expected exception, array length differs
+			}
+		}
 		
 		[Test]
-        public virtual void  TestStaticMethod3()
+		public virtual void  TestStaticMethod3()
 		{
 			System.String[] queries = new System.String[]{"one", "two", "three"};
 			System.String[] fields = new System.String[]{"f1", "f2", "f3"};
@@ -226,35 +270,35 @@
 				q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
 				Assert.Fail();
 			}
-			catch (System.ArgumentException e)
+			catch (System.ArgumentException)
 			{
 				// expected exception, array length differs
 			}
 		}
 		
 		[Test]
-        public virtual void  TestStaticMethod3Old()
+		public virtual void  TestStaticMethod3Old()
 		{
-            System.String[] queries = new System.String[]{"one", "two"};
-            System.String[] fields = new System.String[]{"b", "t"};
-            BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-            Query q = MultiFieldQueryParser.Parse(queries, fields, flags, new StandardAnalyzer());
-            Assert.AreEqual("+b:one -t:two", q.ToString());
-			
-            try
-            {
-                BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
-                q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
-                Assert.Fail();
-            }
-            catch (System.ArgumentException e)
-            {
-                // expected exception, array length differs
-            }
-        }
+			System.String[] queries = new System.String[]{"one", "two"};
+			System.String[] fields = new System.String[]{"b", "t"};
+			BooleanClause.Occur[] flags = new BooleanClause.Occur[]{BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
+			Query q = MultiFieldQueryParser.Parse(queries, fields, flags, new StandardAnalyzer());
+			Assert.AreEqual("+b:one -t:two", q.ToString());
+			
+			try
+			{
+				BooleanClause.Occur[] flags2 = new BooleanClause.Occur[]{BooleanClause.Occur.MUST};
+				q = MultiFieldQueryParser.Parse(queries, fields, flags2, new StandardAnalyzer());
+				Assert.Fail();
+			}
+			catch (System.ArgumentException)
+			{
+				// expected exception, array length differs
+			}
+		}
 		
 		[Test]
-        public virtual void  TestAnalyzerReturningNull()
+		public virtual void  TestAnalyzerReturningNull()
 		{
 			System.String[] fields = new System.String[]{"f1", "f2", "f3"};
 			MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, new AnalyzerReturningNull());
@@ -270,7 +314,7 @@
 		}
 		
 		[Test]
-        public virtual void  TestStopWordSearching()
+		public virtual void  TestStopWordSearching()
 		{
 			Analyzer analyzer = new StandardAnalyzer();
 			Directory ramDir = new RAMDirectory();
@@ -312,7 +356,7 @@
 			
 			private class EmptyTokenStream : TokenStream
 			{
-				public override Token Next()
+				public override Lucene.Net.Analysis.Token Next()
 				{
 					return null;
 				}


