lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aro...@apache.org
Subject svn commit: r832486 [22/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...
Date Tue, 03 Nov 2009 18:06:38 GMT
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestNumericRangeQuery64.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestNumericRangeQuery64.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestNumericRangeQuery64.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestNumericRangeQuery64.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,601 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using NumericField = Lucene.Net.Documents.NumericField;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using NumericUtils = Lucene.Net.Util.NumericUtils;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	public class TestNumericRangeQuery64:LuceneTestCase
+	{
+		// distance of entries
+		private const long distance = 66666L;
+		// shift the starting of the values to the left, to also have negative values:
+		// (in C#, unary minus binds tighter than <<, so this is (-1L) << 31 == -2^31)
+		private const long startOffset = - 1L << 31;
+		// number of docs to generate for testing
+		private const int noDocs = 10000;
+		
+		// shared fixture state: built exactly once by the static constructor at the bottom of this class
+		private static RAMDirectory directory;
+		private static IndexSearcher searcher;
+		
+		/// <summary>test for constant score + boolean query + filter, the other tests only use the constant score mode </summary>
+		private void  TestRange(int precisionStep)
+		{
+			System.String field = "field" + precisionStep;
+			int count = 3000;
+			// inclusive range chosen so exactly 'count' indexed values (spaced 'distance' apart) fall inside it
+			long lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3);
+			System.Int64 tempAux = (long) lower;
+			System.Int64 tempAux2 = (long) upper;
+			NumericRangeQuery q = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
+			System.Int64 tempAux3 = (long) lower;
+			System.Int64 tempAux4 = (long) upper;
+			NumericRangeFilter f = NumericRangeFilter.NewLongRange(field, precisionStep, tempAux3, tempAux4, true, true);
+			int lastTerms = 0;
+			// run the identical range three times: constant-score filter rewrite, constant-score
+			// boolean rewrite, and the standalone filter; all three must agree on the results
+			for (sbyte i = 0; i < 3; i++)
+			{
+				TopDocs topDocs;
+				int terms;
+				System.String type;
+				// reset the per-query/filter term counters so each mode is measured independently
+				q.ClearTotalNumberOfTerms();
+				f.ClearTotalNumberOfTerms();
+				switch (i)
+				{
+					
+					case 0: 
+						type = " (constant score filter rewrite)";
+						q.SetRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
+						topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
+						terms = q.GetTotalNumberOfTerms();
+						break;
+					
+					case 1: 
+						type = " (constant score boolean rewrite)";
+						q.SetRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE);
+						topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
+						terms = q.GetTotalNumberOfTerms();
+						break;
+					
+					case 2: 
+						type = " (filter)";
+						topDocs = searcher.Search(new MatchAllDocsQuery(), f, noDocs, Sort.INDEXORDER);
+						terms = f.GetTotalNumberOfTerms();
+						break;
+					
+					default: 
+						// unreachable for i in [0,3); satisfies the compiler's definite-assignment analysis
+						return ;
+					
+				}
+				System.Console.Out.WriteLine("Found " + terms + " distinct terms in range for field '" + field + "'" + type + ".");
+				ScoreDoc[] sd = topDocs.scoreDocs;
+				Assert.IsNotNull(sd);
+				Assert.AreEqual(count, sd.Length, "Score doc count" + type);
+				// docs are returned in index order, so first/last hit must carry the boundary values
+				Document doc = searcher.Doc(sd[0].doc);
+				Assert.AreEqual(2 * distance + startOffset, System.Int64.Parse(doc.Get(field)), "First doc" + type);
+				doc = searcher.Doc(sd[sd.Length - 1].doc);
+				Assert.AreEqual((1 + count) * distance + startOffset, System.Int64.Parse(doc.Get(field)), "Last doc" + type);
+				// every execution mode must visit the same number of distinct terms
+				if (i > 0)
+				{
+					Assert.AreEqual(lastTerms, terms, "Distinct term number is equal for all query types");
+				}
+				lastTerms = terms;
+			}
+		}
+		
+        [Test]
+		public virtual void  TestRange_8bit()
+		{
+			// exercise TestRange at each indexed precision step (fields "field8".."field2")
+			TestRange(8);
+		}
+		
+        [Test]
+		public virtual void  TestRange_6bit()
+		{
+			TestRange(6);
+		}
+		
+        [Test]
+		public virtual void  TestRange_4bit()
+		{
+			TestRange(4);
+		}
+		
+        [Test]
+		public virtual void  TestRange_2bit()
+		{
+			TestRange(2);
+		}
+		
+        [Test]
+		public virtual void  TestInverseRange()
+		{
+			// lower bound greater than upper bound: the filter must match nothing at all
+			System.Int64 tempAux = 1000L;
+			System.Int64 tempAux2 = - 1000L;
+			NumericRangeFilter f = NumericRangeFilter.NewLongRange("field8", 8, tempAux, tempAux2, true, true);
+			Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, f.GetDocIdSet(searcher.GetIndexReader()), "A inverse range should return the EMPTY_DOCIDSET instance");
+			//UPGRADE_TODO: The 'System.Int64' structure does not have an equivalent to NULL. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1291'"
+			// exclusive range starting above Int64.MaxValue is empty by construction
+			System.Int64 tempAux3 = (long) System.Int64.MaxValue;
+			f = NumericRangeFilter.NewLongRange("field8", 8, tempAux3, null, false, false);
+			Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, f.GetDocIdSet(searcher.GetIndexReader()), "A exclusive range starting with Long.MAX_VALUE should return the EMPTY_DOCIDSET instance");
+			//UPGRADE_TODO: The 'System.Int64' structure does not have an equivalent to NULL. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1291'"
+			// exclusive range ending below Int64.MinValue is likewise empty
+			System.Int64 tempAux4 = (long) System.Int64.MinValue;
+			f = NumericRangeFilter.NewLongRange("field8", 8, null, tempAux4, false, false);
+			Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, f.GetDocIdSet(searcher.GetIndexReader()), "A exclusive range ending with Long.MIN_VALUE should return the EMPTY_DOCIDSET instance");
+		}
+		
+		// half-open range with no lower bound (null): must match every doc from the
+		// smallest indexed value (startOffset) up to 'upper'
+		private void  TestLeftOpenRange(int precisionStep)
+		{
+			System.String field = "field" + precisionStep;
+			int count = 3000;
+			long upper = (count - 1) * distance + (distance / 3) + startOffset;
+			//UPGRADE_TODO: The 'System.Int64' structure does not have an equivalent to NULL. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1291'"
+			System.Int64 tempAux = (long) upper;
+			NumericRangeQuery q = NumericRangeQuery.NewLongRange(field, precisionStep, null, tempAux, true, true);
+			TopDocs topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
+			System.Console.Out.WriteLine("Found " + q.GetTotalNumberOfTerms() + " distinct terms in left open range for field '" + field + "'.");
+			ScoreDoc[] sd = topDocs.scoreDocs;
+			Assert.IsNotNull(sd);
+			Assert.AreEqual(count, sd.Length, "Score doc count");
+			// first hit is the very first indexed value; last hit is the largest value <= upper
+			Document doc = searcher.Doc(sd[0].doc);
+			Assert.AreEqual(startOffset, System.Int64.Parse(doc.Get(field)), "First doc");
+			doc = searcher.Doc(sd[sd.Length - 1].doc);
+			Assert.AreEqual((count - 1) * distance + startOffset, System.Int64.Parse(doc.Get(field)), "Last doc");
+		}
+		
+        [Test]
+		public virtual void  TestLeftOpenRange_8bit()
+		{
+			// exercise the left-open (null lower bound) helper at each precision step
+			TestLeftOpenRange(8);
+		}
+		
+        [Test]
+		public virtual void  TestLeftOpenRange_6bit()
+		{
+			TestLeftOpenRange(6);
+		}
+		
+        [Test]
+		public virtual void  TestLeftOpenRange_4bit()
+		{
+			TestLeftOpenRange(4);
+		}
+		
+        [Test]
+		public virtual void  TestLeftOpenRange_2bit()
+		{
+			TestLeftOpenRange(2);
+		}
+		
+		// half-open range with no upper bound (null): must match every doc from 'lower'
+		// through the largest indexed value
+		private void  TestRightOpenRange(int precisionStep)
+		{
+			System.String field = "field" + precisionStep;
+			int count = 3000;
+			long lower = (count - 1) * distance + (distance / 3) + startOffset;
+			//UPGRADE_TODO: The 'System.Int64' structure does not have an equivalent to NULL. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1291'"
+			System.Int64 tempAux = (long) lower;
+			NumericRangeQuery q = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, null, true, true);
+			TopDocs topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
+			System.Console.Out.WriteLine("Found " + q.GetTotalNumberOfTerms() + " distinct terms in right open range for field '" + field + "'.");
+			ScoreDoc[] sd = topDocs.scoreDocs;
+			Assert.IsNotNull(sd);
+			// the remaining noDocs - count docs lie at or above 'lower'
+			Assert.AreEqual(noDocs - count, sd.Length, "Score doc count");
+			Document doc = searcher.Doc(sd[0].doc);
+			Assert.AreEqual(count * distance + startOffset, System.Int64.Parse(doc.Get(field)), "First doc");
+			doc = searcher.Doc(sd[sd.Length - 1].doc);
+			Assert.AreEqual((noDocs - 1) * distance + startOffset, System.Int64.Parse(doc.Get(field)), "Last doc");
+		}
+		
+        [Test]
+		public virtual void  TestRightOpenRange_8bit()
+		{
+			// exercise the right-open (null upper bound) helper at each precision step
+			TestRightOpenRange(8);
+		}
+		
+        [Test]
+		public virtual void  TestRightOpenRange_6bit()
+		{
+			TestRightOpenRange(6);
+		}
+		
+        [Test]
+		public virtual void  TestRightOpenRange_4bit()
+		{
+			TestRightOpenRange(4);
+		}
+		
+        [Test]
+		public virtual void  TestRightOpenRange_2bit()
+		{
+			TestRightOpenRange(2);
+		}
+		
+		// cross-checks NumericRangeQuery (trie-encoded) against a classical TermRangeQuery
+		// over the same prefix-coded terms: for 50 random ranges, in all four
+		// inclusive/exclusive combinations, both query types must return the same hit count
+		private void  TestRandomTrieAndClassicRangeQuery(int precisionStep)
+		{
+			System.Random rnd = NewRandom();
+			System.String field = "field" + precisionStep;
+			int termCountT = 0, termCountC = 0;
+			for (int i = 0; i < 50; i++)
+			{
+				long lower = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
+				long upper = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
+				// normalize so lower <= upper
+				if (lower > upper)
+				{
+					long a = lower; lower = upper; upper = a;
+				}
+				// test inclusive range
+				System.Int64 tempAux = (long) lower;
+				System.Int64 tempAux2 = (long) upper;
+				NumericRangeQuery tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
+				TermRangeQuery cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), true, true);
+				TopDocs tTopDocs = searcher.Search(tq, 1);
+				TopDocs cTopDocs = searcher.Search(cq, 1);
+				Assert.AreEqual(cTopDocs.totalHits, tTopDocs.totalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+				termCountT += tq.GetTotalNumberOfTerms();
+				termCountC += cq.GetTotalNumberOfTerms();
+				// test exclusive range
+				System.Int64 tempAux3 = (long) lower;
+				System.Int64 tempAux4 = (long) upper;
+				tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux3, tempAux4, false, false);
+				cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), false, false);
+				tTopDocs = searcher.Search(tq, 1);
+				cTopDocs = searcher.Search(cq, 1);
+				Assert.AreEqual(cTopDocs.totalHits, tTopDocs.totalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+				termCountT += tq.GetTotalNumberOfTerms();
+				termCountC += cq.GetTotalNumberOfTerms();
+				// test left exclusive range
+				System.Int64 tempAux5 = (long) lower;
+				System.Int64 tempAux6 = (long) upper;
+				tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux5, tempAux6, false, true);
+				cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), false, true);
+				tTopDocs = searcher.Search(tq, 1);
+				cTopDocs = searcher.Search(cq, 1);
+				Assert.AreEqual(cTopDocs.totalHits, tTopDocs.totalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+				termCountT += tq.GetTotalNumberOfTerms();
+				termCountC += cq.GetTotalNumberOfTerms();
+				// test right exclusive range
+				System.Int64 tempAux7 = (long) lower;
+				System.Int64 tempAux8 = (long) upper;
+				tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux7, tempAux8, true, false);
+				cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), true, false);
+				tTopDocs = searcher.Search(tq, 1);
+				cTopDocs = searcher.Search(cq, 1);
+				Assert.AreEqual(cTopDocs.totalHits, tTopDocs.totalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
+				termCountT += tq.GetTotalNumberOfTerms();
+				termCountC += cq.GetTotalNumberOfTerms();
+			}
+			// with unlimited precision step the trie query degenerates to the classical one,
+			// so the visited term counts must match exactly; otherwise just report averages
+			if (precisionStep == System.Int32.MaxValue)
+			{
+				Assert.AreEqual(termCountT, termCountC, "Total number of terms should be equal for unlimited precStep");
+			}
+			else
+			{
+				System.Console.Out.WriteLine("Average number of terms during random search on '" + field + "':");
+				System.Console.Out.WriteLine(" Trie query: " + (((double) termCountT) / (50 * 4)));
+				System.Console.Out.WriteLine(" Classical query: " + (((double) termCountC) / (50 * 4)));
+			}
+		}
+		
+        [Test]
+		public virtual void  TestRandomTrieAndClassicRangeQuery_8bit()
+		{
+			// exercise the trie-vs-classical comparison at each precision step,
+			// plus the degenerate "no trie" case (Int32.MaxValue)
+			TestRandomTrieAndClassicRangeQuery(8);
+		}
+		
+        [Test]
+		public virtual void  TestRandomTrieAndClassicRangeQuery_6bit()
+		{
+			TestRandomTrieAndClassicRangeQuery(6);
+		}
+		
+        [Test]
+		public virtual void  TestRandomTrieAndClassicRangeQuery_4bit()
+		{
+			TestRandomTrieAndClassicRangeQuery(4);
+		}
+		
+        [Test]
+		public virtual void  TestRandomTrieAndClassicRangeQuery_2bit()
+		{
+			TestRandomTrieAndClassicRangeQuery(2);
+		}
+		
+        [Test]
+		public virtual void  TestRandomTrieAndClassicRangeQuery_NoTrie()
+		{
+			TestRandomTrieAndClassicRangeQuery(System.Int32.MaxValue);
+		}
+		
+		// the "ascfield" fields hold consecutive values with distance 1 (see static ctor),
+		// so the hit count of any range equals its arithmetic length — this verifies the
+		// trie splitting and the inclusive/exclusive boundary handling exactly
+		private void  TestRangeSplit(int precisionStep)
+		{
+			System.Random rnd = NewRandom();
+			System.String field = "ascfield" + precisionStep;
+			// 50 random tests
+			for (int i = 0; i < 50; i++)
+			{
+				long lower = (long) (rnd.NextDouble() * noDocs - noDocs / 2);
+				long upper = (long) (rnd.NextDouble() * noDocs - noDocs / 2);
+				// normalize so lower <= upper
+				if (lower > upper)
+				{
+					long a = lower; lower = upper; upper = a;
+				}
+				// test inclusive range
+				System.Int64 tempAux = (long) lower;
+				System.Int64 tempAux2 = (long) upper;
+				Query tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
+				TopDocs tTopDocs = searcher.Search(tq, 1);
+				Assert.AreEqual(upper - lower + 1, tTopDocs.totalHits, "Returned count of range query must be equal to inclusive range length");
+				// test exclusive range (Max guards the lower == upper case, which yields 0 hits)
+				System.Int64 tempAux3 = (long) lower;
+				System.Int64 tempAux4 = (long) upper;
+				tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux3, tempAux4, false, false);
+				tTopDocs = searcher.Search(tq, 1);
+				Assert.AreEqual(System.Math.Max(upper - lower - 1, 0), tTopDocs.totalHits, "Returned count of range query must be equal to exclusive range length");
+				// test left exclusive range
+				System.Int64 tempAux5 = (long) lower;
+				System.Int64 tempAux6 = (long) upper;
+				tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux5, tempAux6, false, true);
+				tTopDocs = searcher.Search(tq, 1);
+				Assert.AreEqual(upper - lower, tTopDocs.totalHits, "Returned count of range query must be equal to half exclusive range length");
+				// test right exclusive range
+				System.Int64 tempAux7 = (long) lower;
+				System.Int64 tempAux8 = (long) upper;
+				tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux7, tempAux8, true, false);
+				tTopDocs = searcher.Search(tq, 1);
+				Assert.AreEqual(upper - lower, tTopDocs.totalHits, "Returned count of range query must be equal to half exclusive range length");
+			}
+		}
+		
+        [Test]
+		public virtual void  TestRangeSplit_8bit()
+		{
+			// exercise the exact-range-length helper at each precision step
+			TestRangeSplit(8);
+		}
+		
+        [Test]
+		public virtual void  TestRangeSplit_6bit()
+		{
+			TestRangeSplit(6);
+		}
+		
+        [Test]
+		public virtual void  TestRangeSplit_4bit()
+		{
+			TestRangeSplit(4);
+		}
+		
+        [Test]
+		public virtual void  TestRangeSplit_2bit()
+		{
+			TestRangeSplit(2);
+		}
+		
+		/// <summary>we fake a double test using long2double conversion of NumericUtils </summary>
+		private void  TestDoubleRange(int precisionStep)
+		{
+			System.String field = "ascfield" + precisionStep;
+			long lower = - 1000L;
+			long upper = + 2000L;
+			
+			// the indexed values are longs; SortableLongToDouble maps them to doubles whose
+			// sort order matches, so a double range must find the same docs as the long range
+			System.Double tempAux = (double) NumericUtils.SortableLongToDouble(lower);
+			System.Double tempAux2 = (double) NumericUtils.SortableLongToDouble(upper);
+			Query tq = NumericRangeQuery.NewDoubleRange(field, precisionStep, tempAux, tempAux2, true, true);
+			TopDocs tTopDocs = searcher.Search(tq, 1);
+			Assert.AreEqual(upper - lower + 1, tTopDocs.totalHits, "Returned count of range query must be equal to inclusive range length");
+			
+			// same check through the filter API
+			System.Double tempAux3 = (double) NumericUtils.SortableLongToDouble(lower);
+			System.Double tempAux4 = (double) NumericUtils.SortableLongToDouble(upper);
+			Filter tf = NumericRangeFilter.NewDoubleRange(field, precisionStep, tempAux3, tempAux4, true, true);
+			tTopDocs = searcher.Search(new MatchAllDocsQuery(), tf, 1);
+			Assert.AreEqual(upper - lower + 1, tTopDocs.totalHits, "Returned count of range filter must be equal to inclusive range length");
+		}
+		
+        [Test]
+		public virtual void  TestDoubleRange_8bit()
+		{
+			// exercise the faked-double helper at each precision step
+			TestDoubleRange(8);
+		}
+		
+        [Test]
+		public virtual void  TestDoubleRange_6bit()
+		{
+			TestDoubleRange(6);
+		}
+		
+        [Test]
+		public virtual void  TestDoubleRange_4bit()
+		{
+			TestDoubleRange(4);
+		}
+		
+        [Test]
+		public virtual void  TestDoubleRange_2bit()
+		{
+			TestDoubleRange(2);
+		}
+		
+		private void  TestSorting(int precisionStep)
+		{
+			System.Random rnd = NewRandom();
+			System.String field = "field" + precisionStep;
+			// 10 random tests, the index order is ascending,
+			// so using a reverse sort field should retun descending documents
+			for (int i = 0; i < 10; i++)
+			{
+				long lower = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
+				long upper = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
+				// normalize so lower <= upper
+				if (lower > upper)
+				{
+					long a = lower; lower = upper; upper = a;
+				}
+				System.Int64 tempAux = (long) lower;
+				System.Int64 tempAux2 = (long) upper;
+				Query tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
+				// sort descending (reverse = true) on the numeric field itself
+				TopDocs topDocs = searcher.Search(tq, null, noDocs, new Sort(new SortField(field, SortField.LONG, true)));
+				// empty ranges prove nothing about ordering; try the next random range
+				if (topDocs.totalHits == 0)
+					continue;
+				ScoreDoc[] sd = topDocs.scoreDocs;
+				Assert.IsNotNull(sd);
+				// every stored value must be strictly smaller than its predecessor
+				long last = System.Int64.Parse(searcher.Doc(sd[0].doc).Get(field));
+				for (int j = 1; j < sd.Length; j++)
+				{
+					long act = System.Int64.Parse(searcher.Doc(sd[j].doc).Get(field));
+					Assert.IsTrue(last > act, "Docs should be sorted backwards");
+					last = act;
+				}
+			}
+		}
+		
+        [Test]
+		public virtual void  TestSorting_8bit()
+		{
+			// exercise descending-sort verification at each precision step
+			TestSorting(8);
+		}
+		
+        [Test]
+		public virtual void  TestSorting_6bit()
+		{
+			TestSorting(6);
+		}
+		
+        [Test]
+		public virtual void  TestSorting_4bit()
+		{
+			TestSorting(4);
+		}
+		
+        [Test]
+		public virtual void  TestSorting_2bit()
+		{
+			TestSorting(2);
+		}
+		
+        [Test]
+		public virtual void  TestEqualsAndHash()
+		{
+			// CheckHashEquals: a query must be equal to itself and hash consistently,
+			// across all four inclusive/exclusive bound combinations
+			System.Int64 tempAux = 10L;
+			System.Int64 tempAux2 = 20L;
+			QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test1", 4, tempAux, tempAux2, true, true));
+			System.Int64 tempAux3 = 10L;
+			System.Int64 tempAux4 = 20L;
+			QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test2", 4, tempAux3, tempAux4, false, true));
+			System.Int64 tempAux5 = 10L;
+			System.Int64 tempAux6 = 20L;
+			QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test3", 4, tempAux5, tempAux6, true, false));
+			System.Int64 tempAux7 = 10L;
+			System.Int64 tempAux8 = 20L;
+			QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test4", 4, tempAux7, tempAux8, false, false));
+			//UPGRADE_TODO: The 'System.Int64' structure does not have an equivalent to NULL. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1291'"
+			// open-ended variants: missing lower bound, missing upper bound, both missing
+			System.Int64 tempAux9 = 10L;
+			QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test5", 4, tempAux9, null, true, true));
+			//UPGRADE_TODO: The 'System.Int64' structure does not have an equivalent to NULL. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1291'"
+			System.Int64 tempAux10 = 20L;
+			QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test6", 4, null, tempAux10, true, true));
+			//UPGRADE_TODO: The 'System.Int64' structure does not have an equivalent to NULL. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1291'"
+			QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test7", 4, null, null, true, true));
+			// two queries with identical field/step/bounds/flags must be equal
+			System.Int64 tempAux11 = 10L;
+			System.Int64 tempAux12 = 20L;
+			System.Int64 tempAux13 = 10L;
+			System.Int64 tempAux14 = 20L;
+			QueryUtils.CheckEqual(NumericRangeQuery.NewLongRange("test8", 4, tempAux11, tempAux12, true, true), NumericRangeQuery.NewLongRange("test8", 4, tempAux13, tempAux14, true, true));
+			// differing precision step -> unequal
+			System.Int64 tempAux15 = 10L;
+			System.Int64 tempAux16 = 20L;
+			System.Int64 tempAux17 = 10L;
+			System.Int64 tempAux18 = 20L;
+			QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test9", 4, tempAux15, tempAux16, true, true), NumericRangeQuery.NewLongRange("test9", 8, tempAux17, tempAux18, true, true));
+			// differing field name -> unequal
+			System.Int64 tempAux19 = 10L;
+			System.Int64 tempAux20 = 20L;
+			System.Int64 tempAux21 = 10L;
+			System.Int64 tempAux22 = 20L;
+			QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test10a", 4, tempAux19, tempAux20, true, true), NumericRangeQuery.NewLongRange("test10b", 4, tempAux21, tempAux22, true, true));
+			// swapped bounds -> unequal
+			System.Int64 tempAux23 = 10L;
+			System.Int64 tempAux24 = 20L;
+			System.Int64 tempAux25 = 20L;
+			System.Int64 tempAux26 = 10L;
+			QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test11", 4, tempAux23, tempAux24, true, true), NumericRangeQuery.NewLongRange("test11", 4, tempAux25, tempAux26, true, true));
+			// differing inclusiveness flags -> unequal
+			System.Int64 tempAux27 = 10L;
+			System.Int64 tempAux28 = 20L;
+			System.Int64 tempAux29 = 10L;
+			System.Int64 tempAux30 = 20L;
+			QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test12", 4, tempAux27, tempAux28, true, true), NumericRangeQuery.NewLongRange("test12", 4, tempAux29, tempAux30, false, true));
+			// long range vs float range on same field -> unequal
+			System.Int64 tempAux31 = 10L;
+			System.Int64 tempAux32 = 20L;
+			System.Single tempAux33 = (float) 10f;
+			System.Single tempAux34 = (float) 20f;
+			QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test13", 4, tempAux31, tempAux32, true, true), NumericRangeQuery.NewFloatRange("test13", 4, tempAux33, tempAux34, true, true));
+			// difference to int range is tested in TestNumericRangeQuery32
+		}
+		// static constructor: builds the shared RAMDirectory index once for all tests.
+		// Each of the noDocs documents carries every field at every precision step,
+		// so all test methods query the same data.
+		static TestNumericRangeQuery64()
+		{
+			{
+				try
+				{
+					// set the theoretical maximum term count for 8bit (see docs for the number)
+					BooleanQuery.SetMaxClauseCount(7 * 255 * 2 + 255);
+					
+					directory = new RAMDirectory();
+					IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);
+					
+					// "field*" store their values (tests read them back); "ascfield*" are index-only
+					NumericField field8 = new NumericField("field8", 8, Field.Store.YES, true), field6 = new NumericField("field6", 6, Field.Store.YES, true), field4 = new NumericField("field4", 4, Field.Store.YES, true), field2 = new NumericField("field2", 2, Field.Store.YES, true), fieldNoTrie = new NumericField("field" + System.Int32.MaxValue, System.Int32.MaxValue, Field.Store.YES, true), ascfield8 = new NumericField("ascfield8", 8, Field.Store.NO, true), ascfield6 = new NumericField("ascfield6", 6, Field.Store.NO, true), ascfield4 = new NumericField("ascfield4", 4, Field.Store.NO, true), ascfield2 = new NumericField("ascfield2", 2, Field.Store.NO, true);
+					
+					Document doc = new Document();
+					// add fields, that have a distance to test general functionality
+					doc.Add(field8); doc.Add(field6); doc.Add(field4); doc.Add(field2); doc.Add(fieldNoTrie);
+					// add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
+					doc.Add(ascfield8); doc.Add(ascfield6); doc.Add(ascfield4); doc.Add(ascfield2);
+					
+					// Add a series of noDocs docs with increasing long values, by updating the fields
+					// (the single Document instance is reused; only the field values change per iteration)
+					for (int l = 0; l < noDocs; l++)
+					{
+						long val = distance * l + startOffset;
+						field8.SetLongValue(val);
+						field6.SetLongValue(val);
+						field4.SetLongValue(val);
+						field2.SetLongValue(val);
+						fieldNoTrie.SetLongValue(val);
+						
+						val = l - (noDocs / 2);
+						ascfield8.SetLongValue(val);
+						ascfield6.SetLongValue(val);
+						ascfield4.SetLongValue(val);
+						ascfield2.SetLongValue(val);
+						writer.AddDocument(doc);
+					}
+					
+					writer.Optimize();
+					writer.Close();
+					searcher = new IndexSearcher(directory, true);
+				}
+				catch (System.Exception e)
+				{
+					// NOTE(review): the empty message string discards context; a descriptive
+					// message (e.g. "index setup failed") would aid diagnosis — confirm intent
+					throw new System.SystemException("", e);
+				}
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestParallelMultiSearcher.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestParallelMultiSearcher.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -15,17 +15,22 @@
  * limitations under the License.
  */
 
-using NUnit.Framework;
 using System;
 
+using NUnit.Framework;
+
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> Unit tests for the ParallelMultiSearcher </summary>
-	[TestFixture]
-	public class TestParallelMultiSearcher : TestMultiSearcher
+    [TestFixture]
+	public class TestParallelMultiSearcher:TestMultiSearcher
 	{
 		
+		public TestParallelMultiSearcher(System.String name):base(name)
+		{
+		}
+		
 		protected internal override MultiSearcher GetMultiSearcherInstance(Searcher[] searchers)
 		{
 			return new ParallelMultiSearcher(searchers);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhrasePrefixQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhrasePrefixQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,7 +27,6 @@
 using Term = Lucene.Net.Index.Term;
 using TermEnum = Lucene.Net.Index.TermEnum;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -36,22 +36,26 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Id: TestPhrasePrefixQuery.java 583534 2007-10-10 16:46:35Z mikemccand $
+	/// <version>  $Id: TestPhrasePrefixQuery.java 694004 2008-09-10 21:38:52Z mikemccand $
 	/// </version>
-	[TestFixture]
-	public class TestPhrasePrefixQuery : LuceneTestCase
+    [TestFixture]
+	public class TestPhrasePrefixQuery:LuceneTestCase
 	{
+		public TestPhrasePrefixQuery(System.String name):base(name)
+		{
+		}
+		
 		/// <summary> </summary>
 		[Test]
 		public virtual void  TestPhrasePrefix()
 		{
 			RAMDirectory indexStore = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			Lucene.Net.Documents.Document doc1 = new Lucene.Net.Documents.Document();
-			Lucene.Net.Documents.Document doc2 = new Lucene.Net.Documents.Document();
-			Lucene.Net.Documents.Document doc3 = new Lucene.Net.Documents.Document();
-			Lucene.Net.Documents.Document doc4 = new Lucene.Net.Documents.Document();
-			Lucene.Net.Documents.Document doc5 = new Lucene.Net.Documents.Document();
+			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			Document doc1 = new Document();
+			Document doc2 = new Document();
+			Document doc3 = new Document();
+			Document doc4 = new Document();
+			Document doc5 = new Document();
 			doc1.Add(new Field("body", "blueberry pie", Field.Store.YES, Field.Index.ANALYZED));
 			doc2.Add(new Field("body", "blueberry strudel", Field.Store.YES, Field.Index.ANALYZED));
 			doc3.Add(new Field("body", "blueberry pizza", Field.Store.YES, Field.Index.ANALYZED));
@@ -95,8 +99,8 @@
 			ScoreDoc[] result;
 			result = searcher.Search(query1, null, 1000).scoreDocs;
 			Assert.AreEqual(2, result.Length);
-
-            result = searcher.Search(query2, null, 1000).scoreDocs;
+			
+			result = searcher.Search(query2, null, 1000).scoreDocs;
 			Assert.AreEqual(0, result.Length);
 		}
 	}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPhraseQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPhraseQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,14 +19,14 @@
 
 using NUnit.Framework;
 
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
-using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using Term = Lucene.Net.Index.Term;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -36,10 +36,10 @@
 	/// </summary>
 	/// <seealso cref="TestPositionIncrement">
 	/// </seealso>
-	[TestFixture]
-	public class TestPhraseQuery : LuceneTestCase
+    [TestFixture]
+	public class TestPhraseQuery:LuceneTestCase
 	{
-		private class AnonymousClassAnalyzer : Analyzer
+		private class AnonymousClassAnalyzer:Analyzer
 		{
 			public AnonymousClassAnalyzer(TestPhraseQuery enclosingInstance)
 			{
@@ -56,8 +56,8 @@
 				{
 					return enclosingInstance;
 				}
+				
 			}
-
 			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
 				return new WhitespaceTokenizer(reader);
@@ -76,15 +76,15 @@
 		private PhraseQuery query;
 		private RAMDirectory directory;
 		
-		[SetUp]
-		public override void SetUp()
+		[Test]
+		public override void  SetUp()
 		{
 			base.SetUp();
 			directory = new RAMDirectory();
 			Analyzer analyzer = new AnonymousClassAnalyzer(this);
 			IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("repeated", "this is a repeated field - first part", Field.Store.YES, Field.Index.ANALYZED));
 			Fieldable repeatedField = new Field("repeated", "second part of a repeated field", Field.Store.YES, Field.Index.ANALYZED);
@@ -108,7 +108,7 @@
 		}
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			base.TearDown();
 			searcher.Close();
@@ -119,8 +119,8 @@
 		public virtual void  TestNotCloseEnough()
 		{
 			query.SetSlop(2);
-			query.Add(new Term("field", "one"));
-			query.Add(new Term("field", "five"));
+			query.add(new Term("field", "one"));
+			query.add(new Term("field", "five"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length);
 			QueryUtils.Check(query, searcher);
@@ -130,8 +130,8 @@
 		public virtual void  TestBarelyCloseEnough()
 		{
 			query.SetSlop(3);
-			query.Add(new Term("field", "one"));
-			query.Add(new Term("field", "five"));
+			query.add(new Term("field", "one"));
+			query.add(new Term("field", "five"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			QueryUtils.Check(query, searcher);
@@ -142,16 +142,16 @@
 		public virtual void  TestExact()
 		{
 			// slop is zero by default
-			query.Add(new Term("field", "four"));
-			query.Add(new Term("field", "five"));
+			query.add(new Term("field", "four"));
+			query.add(new Term("field", "five"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "exact match");
 			QueryUtils.Check(query, searcher);
-
+			
 			
 			query = new PhraseQuery();
-			query.Add(new Term("field", "two"));
-			query.Add(new Term("field", "one"));
+			query.add(new Term("field", "two"));
+			query.add(new Term("field", "one"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length, "reverse not exact");
 			QueryUtils.Check(query, searcher);
@@ -162,19 +162,19 @@
 		{
 			// Ensures slop of 1 works with terms in order.
 			query.SetSlop(1);
-			query.Add(new Term("field", "one"));
-			query.Add(new Term("field", "two"));
+			query.add(new Term("field", "one"));
+			query.add(new Term("field", "two"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "in order");
 			QueryUtils.Check(query, searcher);
-
+			
 			
 			// Ensures slop of 1 does not work for phrases out of order;
 			// must be at least 2.
 			query = new PhraseQuery();
 			query.SetSlop(1);
-			query.Add(new Term("field", "two"));
-			query.Add(new Term("field", "one"));
+			query.add(new Term("field", "two"));
+			query.add(new Term("field", "one"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length, "reversed, slop not 2 or more");
 			QueryUtils.Check(query, searcher);
@@ -185,17 +185,17 @@
 		public virtual void  TestOrderDoesntMatter()
 		{
 			query.SetSlop(2); // must be at least two for reverse order match
-			query.Add(new Term("field", "two"));
-			query.Add(new Term("field", "one"));
+			query.add(new Term("field", "two"));
+			query.add(new Term("field", "one"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "just sloppy enough");
 			QueryUtils.Check(query, searcher);
-
+			
 			
 			query = new PhraseQuery();
 			query.SetSlop(2);
-			query.Add(new Term("field", "three"));
-			query.Add(new Term("field", "one"));
+			query.add(new Term("field", "three"));
+			query.add(new Term("field", "one"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length, "not sloppy enough");
 			QueryUtils.Check(query, searcher);
@@ -208,23 +208,23 @@
 		public virtual void  TestMulipleTerms()
 		{
 			query.SetSlop(2);
-			query.Add(new Term("field", "one"));
-			query.Add(new Term("field", "three"));
-			query.Add(new Term("field", "five"));
+			query.add(new Term("field", "one"));
+			query.add(new Term("field", "three"));
+			query.add(new Term("field", "five"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "two total moves");
 			QueryUtils.Check(query, searcher);
-
+			
 			
 			query = new PhraseQuery();
 			query.SetSlop(5); // it takes six moves to match this phrase
-			query.Add(new Term("field", "five"));
-			query.Add(new Term("field", "three"));
-			query.Add(new Term("field", "one"));
+			query.add(new Term("field", "five"));
+			query.add(new Term("field", "three"));
+			query.add(new Term("field", "one"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length, "slop of 5 not close enough");
 			QueryUtils.Check(query, searcher);
-
+			
 			
 			query.SetSlop(6);
 			hits = searcher.Search(query, null, 1000).scoreDocs;
@@ -238,7 +238,7 @@
 			RAMDirectory directory = new RAMDirectory();
 			StopAnalyzer stopAnalyzer = new StopAnalyzer();
 			IndexWriter writer = new IndexWriter(directory, stopAnalyzer, true, IndexWriter.MaxFieldLength.LIMITED);
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("field", "the stop words are here", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			writer.Close();
@@ -247,21 +247,21 @@
 			
 			// valid exact phrase query
 			PhraseQuery query = new PhraseQuery();
-			query.Add(new Term("field", "stop"));
-			query.Add(new Term("field", "words"));
+			query.add(new Term("field", "stop"));
+			query.add(new Term("field", "words"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			QueryUtils.Check(query, searcher);
-
+			
 			
 			// currently StopAnalyzer does not leave "holes", so this matches.
 			query = new PhraseQuery();
-			query.Add(new Term("field", "words"));
-			query.Add(new Term("field", "here"));
+			query.add(new Term("field", "words"));
+			query.add(new Term("field", "here"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			QueryUtils.Check(query, searcher);
-
+			
 			
 			searcher.Close();
 		}
@@ -272,11 +272,11 @@
 			RAMDirectory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
-			doc = new Lucene.Net.Documents.Document();
+			doc = new Document();
 			doc.Add(new Field("contents", "foobar", Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("source", "marketing info", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
@@ -287,34 +287,34 @@
 			IndexSearcher searcher = new IndexSearcher(directory);
 			
 			PhraseQuery phraseQuery = new PhraseQuery();
-			phraseQuery.Add(new Term("source", "marketing"));
-			phraseQuery.Add(new Term("source", "info"));
+			phraseQuery.add(new Term("source", "marketing"));
+			phraseQuery.add(new Term("source", "info"));
 			ScoreDoc[] hits = searcher.Search(phraseQuery, null, 1000).scoreDocs;
 			Assert.AreEqual(2, hits.Length);
 			QueryUtils.Check(phraseQuery, searcher);
-
+			
 			
 			TermQuery termQuery = new TermQuery(new Term("contents", "foobar"));
-			BooleanQuery boolQuery = new BooleanQuery();
-			boolQuery.Add(termQuery, BooleanClause.Occur.MUST);
-			boolQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
-			hits = searcher.Search(boolQuery, null, 1000).scoreDocs;
+			BooleanQuery booleanQuery = new BooleanQuery();
+			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
+			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
+			hits = searcher.Search(booleanQuery, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			QueryUtils.Check(termQuery, searcher);
-
+			
 			
 			searcher.Close();
 			
 			writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			doc = new Lucene.Net.Documents.Document();
+			doc = new Document();
 			doc.Add(new Field("contents", "map entry woo", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
-			doc = new Lucene.Net.Documents.Document();
+			doc = new Document();
 			doc.Add(new Field("contents", "woo map entry", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
-			doc = new Lucene.Net.Documents.Document();
+			doc = new Document();
 			doc.Add(new Field("contents", "map foobarword entry woo", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
@@ -325,28 +325,28 @@
 			
 			termQuery = new TermQuery(new Term("contents", "woo"));
 			phraseQuery = new PhraseQuery();
-			phraseQuery.Add(new Term("contents", "map"));
-			phraseQuery.Add(new Term("contents", "entry"));
+			phraseQuery.add(new Term("contents", "map"));
+			phraseQuery.add(new Term("contents", "entry"));
 			
 			hits = searcher.Search(termQuery, null, 1000).scoreDocs;
 			Assert.AreEqual(3, hits.Length);
 			hits = searcher.Search(phraseQuery, null, 1000).scoreDocs;
 			Assert.AreEqual(2, hits.Length);
-
 			
-			boolQuery = new BooleanQuery();
-			boolQuery.Add(termQuery, BooleanClause.Occur.MUST);
-			boolQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
-			hits = searcher.Search(boolQuery, null, 1000).scoreDocs;
+			
+			booleanQuery = new BooleanQuery();
+			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
+			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
+			hits = searcher.Search(booleanQuery, null, 1000).scoreDocs;
 			Assert.AreEqual(2, hits.Length);
 			
-			boolQuery = new BooleanQuery();
-			boolQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
-			boolQuery.Add(termQuery, BooleanClause.Occur.MUST);
-			hits = searcher.Search(boolQuery, null, 1000).scoreDocs;
+			booleanQuery = new BooleanQuery();
+			booleanQuery.Add(phraseQuery, BooleanClause.Occur.MUST);
+			booleanQuery.Add(termQuery, BooleanClause.Occur.MUST);
+			hits = searcher.Search(booleanQuery, null, 1000).scoreDocs;
 			Assert.AreEqual(2, hits.Length);
-			QueryUtils.Check(boolQuery, searcher);
-
+			QueryUtils.Check(booleanQuery, searcher);
+			
 			
 			searcher.Close();
 			directory.Close();
@@ -358,15 +358,15 @@
 			Directory directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("field", "foo firstname lastname foo", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
-			Lucene.Net.Documents.Document doc2 = new Lucene.Net.Documents.Document();
+			Document doc2 = new Document();
 			doc2.Add(new Field("field", "foo firstname xxx lastname foo", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc2);
 			
-			Lucene.Net.Documents.Document doc3 = new Lucene.Net.Documents.Document();
+			Document doc3 = new Document();
 			doc3.Add(new Field("field", "foo firstname xxx yyy lastname foo", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc3);
 			
@@ -375,8 +375,8 @@
 			
 			Searcher searcher = new IndexSearcher(directory);
 			PhraseQuery query = new PhraseQuery();
-			query.Add(new Term("field", "firstname"));
-			query.Add(new Term("field", "lastname"));
+			query.add(new Term("field", "firstname"));
+			query.add(new Term("field", "lastname"));
 			query.SetSlop(System.Int32.MaxValue);
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(3, hits.Length);
@@ -390,27 +390,27 @@
 			Assert.AreEqual(2, hits[2].doc);
 			QueryUtils.Check(query, searcher);
 		}
-
-        [Test]
-        public void TestToString()
-        {
-            StopAnalyzer analyzer = new StopAnalyzer();
-            StopFilter.SetEnablePositionIncrementsDefault(true);
-            QueryParser qp = new QueryParser("field", analyzer);
-            qp.SetEnablePositionIncrements(true);
-            PhraseQuery q = (PhraseQuery)qp.Parse("\"this hi this is a test is\"");
-            Assert.AreEqual("field:\"? hi ? ? ? test\"", q.ToString());
-            q.Add(new Term("field", "hello"), 1);
-            Assert.AreEqual("field:\"? hi|hello ? ? ? test\"", q.ToString());
-        }
-
-        [Test]
+		
+		[Test]
+		public virtual void  TestToString()
+		{
+			StopAnalyzer analyzer = new StopAnalyzer();
+			StopFilter.SetEnablePositionIncrementsDefault(true);
+			QueryParser qp = new QueryParser("field", analyzer);
+			qp.SetEnablePositionIncrements(true);
+			PhraseQuery q = (PhraseQuery) qp.Parse("\"this hi this is a test is\"");
+			Assert.AreEqual("field:\"? hi ? ? ? test\"", q.ToString());
+			q.Add(new Term("field", "hello"), 1);
+			Assert.AreEqual("field:\"? hi|hello ? ? ? test\"", q.ToString());
+		}
+		
+		[Test]
 		public virtual void  TestWrappedPhrase()
 		{
-			query.Add(new Term("repeated", "first"));
-			query.Add(new Term("repeated", "part"));
-			query.Add(new Term("repeated", "second"));
-			query.Add(new Term("repeated", "part"));
+			query.add(new Term("repeated", "first"));
+			query.add(new Term("repeated", "part"));
+			query.add(new Term("repeated", "second"));
+			query.add(new Term("repeated", "part"));
 			query.SetSlop(100);
 			
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
@@ -429,9 +429,9 @@
 		public virtual void  TestNonExistingPhrase()
 		{
 			// phrase without repetitions that exists in 2 docs
-			query.Add(new Term("nonexist", "phrase"));
-			query.Add(new Term("nonexist", "notexist"));
-			query.Add(new Term("nonexist", "found"));
+			query.add(new Term("nonexist", "phrase"));
+			query.add(new Term("nonexist", "notexist"));
+			query.add(new Term("nonexist", "found"));
 			query.SetSlop(2); // would be found this way
 			
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
@@ -440,9 +440,9 @@
 			
 			// phrase with repetitions that exists in 2 docs
 			query = new PhraseQuery();
-			query.Add(new Term("nonexist", "phrase"));
-			query.Add(new Term("nonexist", "exist"));
-			query.Add(new Term("nonexist", "exist"));
+			query.add(new Term("nonexist", "phrase"));
+			query.add(new Term("nonexist", "exist"));
+			query.add(new Term("nonexist", "exist"));
 			query.SetSlop(1); // would be found 
 			
 			hits = searcher.Search(query, null, 1000).scoreDocs;
@@ -451,9 +451,9 @@
 			
 			// phrase I with repetitions that does not exist in any doc
 			query = new PhraseQuery();
-			query.Add(new Term("nonexist", "phrase"));
-			query.Add(new Term("nonexist", "notexist"));
-			query.Add(new Term("nonexist", "phrase"));
+			query.add(new Term("nonexist", "phrase"));
+			query.add(new Term("nonexist", "notexist"));
+			query.add(new Term("nonexist", "phrase"));
 			query.SetSlop(1000); // would not be found no matter how high the slop is
 			
 			hits = searcher.Search(query, null, 1000).scoreDocs;
@@ -462,10 +462,10 @@
 			
 			// phrase II with repetitions that does not exist in any doc
 			query = new PhraseQuery();
-			query.Add(new Term("nonexist", "phrase"));
-			query.Add(new Term("nonexist", "exist"));
-			query.Add(new Term("nonexist", "exist"));
-			query.Add(new Term("nonexist", "exist"));
+			query.add(new Term("nonexist", "phrase"));
+			query.add(new Term("nonexist", "exist"));
+			query.add(new Term("nonexist", "exist"));
+			query.add(new Term("nonexist", "exist"));
 			query.SetSlop(1000); // would not be found no matter how high the slop is
 			
 			hits = searcher.Search(query, null, 1000).scoreDocs;
@@ -481,13 +481,14 @@
 		/// Also, in this case order in query does not matter. 
 		/// Also, when an exact match is found, both sloppy scorer and exact scorer scores the same.   
 		/// </summary>
+		[Test]
 		public virtual void  TestPalyndrome2()
 		{
 			
 			// search on non palyndrome, find phrase with no slop, using exact phrase scorer
 			query.SetSlop(0); // to use exact phrase scorer
-			query.Add(new Term("field", "two"));
-			query.Add(new Term("field", "three"));
+			query.add(new Term("field", "two"));
+			query.add(new Term("field", "three"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "phrase found with exact phrase scorer");
 			float score0 = hits[0].score;
@@ -506,8 +507,8 @@
 			// search ordered in palyndrome, find it twice
 			query = new PhraseQuery();
 			query.SetSlop(2); // must be at least two for both ordered and reversed to match
-			query.Add(new Term("palindrome", "two"));
-			query.Add(new Term("palindrome", "three"));
+			query.add(new Term("palindrome", "two"));
+			query.add(new Term("palindrome", "three"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "just sloppy enough");
 			float score2 = hits[0].score;
@@ -515,13 +516,13 @@
 			QueryUtils.Check(query, searcher);
 			
 			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
-			//assertTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
+            //Assert.IsTrue(score1+SCORE_COMP_THRESH<score2, "ordered scores higher in palindrome");
 			
			// search reversed in palindrome, find it twice
 			query = new PhraseQuery();
 			query.SetSlop(2); // must be at least two for both ordered and reversed to match
-			query.Add(new Term("palindrome", "three"));
-			query.Add(new Term("palindrome", "two"));
+			query.add(new Term("palindrome", "three"));
+			query.add(new Term("palindrome", "two"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "just sloppy enough");
 			float score3 = hits[0].score;
@@ -529,8 +530,8 @@
 			QueryUtils.Check(query, searcher);
 			
 			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
-			//assertTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
-			//assertEquals("ordered or reversed does not matter",score2, score3, SCORE_COMP_THRESH);
+            //Assert.IsTrue(score1+SCORE_COMP_THRESH<score3,"reversed scores higher in palindrome");
+            //Assert.AreEqual(score2, score3, SCORE_COMP_THRESH,"ordered or reversed does not matter");
 		}
 		
 		/// <summary> Working on a 2 fields like this:
@@ -547,9 +548,9 @@
 			
 			// search on non palyndrome, find phrase with no slop, using exact phrase scorer
 			query.SetSlop(0); // to use exact phrase scorer
-			query.Add(new Term("field", "one"));
-			query.Add(new Term("field", "two"));
-			query.Add(new Term("field", "three"));
+			query.add(new Term("field", "one"));
+			query.add(new Term("field", "two"));
+			query.add(new Term("field", "three"));
 			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "phrase found with exact phrase scorer");
 			float score0 = hits[0].score;
@@ -568,9 +569,9 @@
 			// search ordered in palyndrome, find it twice
 			query = new PhraseQuery();
 			query.SetSlop(4); // must be at least four for both ordered and reversed to match
-			query.Add(new Term("palindrome", "one"));
-			query.Add(new Term("palindrome", "two"));
-			query.Add(new Term("palindrome", "three"));
+			query.add(new Term("palindrome", "one"));
+			query.add(new Term("palindrome", "two"));
+			query.add(new Term("palindrome", "three"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "just sloppy enough");
 			float score2 = hits[0].score;
@@ -578,14 +579,14 @@
 			QueryUtils.Check(query, searcher);
 			
 			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
-			//assertTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
+            //Assert.IsTrue(score1+SCORE_COMP_THRESH<score2,"ordered scores higher in palindrome");
 			
			// search reversed in palindrome, find it twice
 			query = new PhraseQuery();
 			query.SetSlop(4); // must be at least four for both ordered and reversed to match
-			query.Add(new Term("palindrome", "three"));
-			query.Add(new Term("palindrome", "two"));
-			query.Add(new Term("palindrome", "one"));
+			query.add(new Term("palindrome", "three"));
+			query.add(new Term("palindrome", "two"));
+			query.add(new Term("palindrome", "one"));
 			hits = searcher.Search(query, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length, "just sloppy enough");
 			float score3 = hits[0].score;
@@ -593,18 +594,17 @@
 			QueryUtils.Check(query, searcher);
 			
 			//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq(). 
-			//assertTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
-			//assertEquals("ordered or reversed does not matter",score2, score3, SCORE_COMP_THRESH);
+            //Assert.IsTrue(score1+SCORE_COMP_THRESH<score3,"reversed scores higher in palindrome");
+            //Assert.AreEqual(score2, score3, SCORE_COMP_THRESH, "ordered or reversed does not matter");
+		}
+		
+		// LUCENE-1280
+		[Test]
+		public virtual void  TestEmptyPhraseQuery()
+		{
+			BooleanQuery q2 = new BooleanQuery();
+			q2.Add(new PhraseQuery(), BooleanClause.Occur.MUST);
+			q2.ToString();
 		}
-
-        // LUCENE-1280
-        [Test]
-        public void TestEmptyPhraseQuery()
-        {
-            PhraseQuery q1 = new PhraseQuery();
-            BooleanQuery q2 = new BooleanQuery();
-            q2.Add(new PhraseQuery(), BooleanClause.Occur.MUST);
-            q2.ToString();
-        }
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPositionIncrement.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,19 +19,31 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
+using StopFilter = Lucene.Net.Analysis.StopFilter;
+using TokenFilter = Lucene.Net.Analysis.TokenFilter;
+using TokenStream = Lucene.Net.Analysis.TokenStream;
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
+using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
+using PositionIncrementAttribute = Lucene.Net.Analysis.Tokenattributes.PositionIncrementAttribute;
+using TermAttribute = Lucene.Net.Analysis.Tokenattributes.TermAttribute;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Payload = Lucene.Net.Index.Payload;
 using Term = Lucene.Net.Index.Term;
+using TermPositions = Lucene.Net.Index.TermPositions;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using Analyzer = Lucene.Net.Analysis.Analyzer;
-using StopAnalyzer = Lucene.Net.Analysis.StopAnalyzer;
-using StopFilter = Lucene.Net.Analysis.StopFilter;
-using Token = Lucene.Net.Analysis.Token;
-using TokenStream = Lucene.Net.Analysis.TokenStream;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using BaseTokenStreamTestCase = Lucene.Net.Analysis.BaseTokenStreamTestCase;
+using PayloadSpanUtil = Lucene.Net.Search.Payloads.PayloadSpanUtil;
+using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
+using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
 
 namespace Lucene.Net.Search
 {
@@ -40,19 +52,17 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Revision: 607591 $
+	/// <version>  $Revision: 806844 $
 	/// </version>
-	[TestFixture]
-	public class TestPositionIncrement : LuceneTestCase
+	public class TestPositionIncrement:BaseTokenStreamTestCase
 	{
-		private class AnonymousClassAnalyzer : Analyzer
+		private class AnonymousClassAnalyzer:Analyzer
 		{
 			public AnonymousClassAnalyzer(TestPositionIncrement enclosingInstance)
 			{
 				InitBlock(enclosingInstance);
 			}
-
-			private class AnonymousClassTokenStream : TokenStream
+			private class AnonymousClassTokenStream:TokenStream
 			{
 				public AnonymousClassTokenStream(AnonymousClassAnalyzer enclosingInstance)
 				{
@@ -61,6 +71,9 @@
 				private void  InitBlock(AnonymousClassAnalyzer enclosingInstance)
 				{
 					this.enclosingInstance = enclosingInstance;
+					posIncrAtt = (PositionIncrementAttribute) AddAttribute(typeof(PositionIncrementAttribute));
+					termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
+					offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
 				}
 				private AnonymousClassAnalyzer enclosingInstance;
 				public AnonymousClassAnalyzer Enclosing_Instance
@@ -71,20 +84,23 @@
 					}
 					
 				}
-
 				private System.String[] TOKENS = new System.String[]{"1", "2", "3", "4", "5"};
-				private int[] INCREMENTS = new int[]{1, 2, 1, 0, 1};
+				private int[] INCREMENTS = new int[]{0, 2, 1, 0, 1};
 				private int i = 0;
 				
-				public override Token Next(Token reusableToken)
+				internal PositionIncrementAttribute posIncrAtt;
+				internal TermAttribute termAtt;
+				internal OffsetAttribute offsetAtt;
+				
+				public override bool IncrementToken()
 				{
-                    System.Diagnostics.Debug.Assert(reusableToken != null);
 					if (i == TOKENS.Length)
-						return null;
-					reusableToken.Reinit(TOKENS[i], i, i);
-					reusableToken.SetPositionIncrement(INCREMENTS[i]);
+						return false;
+					termAtt.SetTermBuffer(TOKENS[i]);
+					offsetAtt.SetOffset(i, i);
+					posIncrAtt.SetPositionIncrement(INCREMENTS[i]);
 					i++;
-					return reusableToken;
+					return true;
 				}
 			}
 			private void  InitBlock(TestPositionIncrement enclosingInstance)
@@ -106,38 +122,11 @@
 			}
 		}
 		
-		private class AnonymousClassAnalyzer1 : Analyzer
-		{
-			public AnonymousClassAnalyzer1(TestPositionIncrement enclosingInstance)
-			{
-				InitBlock(enclosingInstance);
-			}
-			private void  InitBlock(TestPositionIncrement enclosingInstance)
-			{
-				this.enclosingInstance = enclosingInstance;
-			}
-			private TestPositionIncrement enclosingInstance;
-			public TestPositionIncrement Enclosing_Instance
-			{
-				get
-				{
-					return enclosingInstance;
-				}
-				
-			}
-			internal WhitespaceAnalyzer a = new WhitespaceAnalyzer();
-			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
-			{
-				TokenStream ts = a.TokenStream(fieldName, reader);
-				return new StopFilter(ts, new System.String[]{"stop"});
-			}
-		}
-		
 		[Test]
 		public virtual void  TestSetPosition()
 		{
 			Analyzer analyzer = new AnonymousClassAnalyzer(this);
-			RAMDirectory store = new RAMDirectory();
+			Directory store = new MockRAMDirectory();
 			IndexWriter writer = new IndexWriter(store, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 			Document d = new Document();
 			d.Add(new Field("field", "bogus", Field.Store.YES, Field.Index.ANALYZED));
@@ -145,13 +134,25 @@
 			writer.Optimize();
 			writer.Close();
 			
+			
 			IndexSearcher searcher = new IndexSearcher(store);
+			
+			TermPositions pos = searcher.GetIndexReader().TermPositions(new Term("field", "1"));
+			pos.Next();
+			// first token should be at position 0
+			Assert.AreEqual(0, pos.NextPosition());
+			
+			pos = searcher.GetIndexReader().TermPositions(new Term("field", "2"));
+			pos.Next();
+			// second token should be at position 2
+			Assert.AreEqual(2, pos.NextPosition());
+			
 			PhraseQuery q;
 			ScoreDoc[] hits;
 			
 			q = new PhraseQuery();
-			q.Add(new Term("field", "1"));
-			q.Add(new Term("field", "2"));
+			q.add(new Term("field", "1"));
+			q.add(new Term("field", "2"));
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length);
 			
@@ -170,14 +171,14 @@
 			Assert.AreEqual(1, hits.Length);
 			
 			q = new PhraseQuery();
-			q.Add(new Term("field", "2"));
-			q.Add(new Term("field", "3"));
+			q.add(new Term("field", "2"));
+			q.add(new Term("field", "3"));
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			
 			q = new PhraseQuery();
-			q.Add(new Term("field", "3"));
-			q.Add(new Term("field", "4"));
+			q.add(new Term("field", "3"));
+			q.add(new Term("field", "4"));
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length);
 			
@@ -204,34 +205,31 @@
 			Assert.AreEqual(1, hits.Length);
 			
 			q = new PhraseQuery();
-			q.Add(new Term("field", "2"));
-			q.Add(new Term("field", "4"));
+			q.add(new Term("field", "2"));
+			q.add(new Term("field", "4"));
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			
 			q = new PhraseQuery();
-			q.Add(new Term("field", "3"));
-			q.Add(new Term("field", "5"));
+			q.add(new Term("field", "3"));
+			q.add(new Term("field", "5"));
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			
 			q = new PhraseQuery();
-			q.Add(new Term("field", "4"));
-			q.Add(new Term("field", "5"));
+			q.add(new Term("field", "4"));
+			q.add(new Term("field", "5"));
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			
 			q = new PhraseQuery();
-			q.Add(new Term("field", "2"));
-			q.Add(new Term("field", "5"));
+			q.add(new Term("field", "2"));
+			q.add(new Term("field", "5"));
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length);
 			
-			// analyzer to introduce stopwords and increment gaps 
-			Analyzer stpa = new AnonymousClassAnalyzer1(this);
-			
 			// should not find "1 2" because there is a gap of 1 in the index
-			Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field", stpa);
+			QueryParser qp = new QueryParser("field", new StopWhitespaceAnalyzer(false));
 			q = (PhraseQuery) qp.Parse("\"1 2\"");
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length);
@@ -242,47 +240,198 @@
 			Assert.AreEqual(0, hits.Length);
 			
 			// query parser alone won't help, because stop filter swallows the increments. 
-            bool dflt = StopFilter.GetEnablePositionIncrementsDefault();
-            StopFilter.SetEnablePositionIncrementsDefault(false);
-            qp.SetEnablePositionIncrements(true);
+			qp.SetEnablePositionIncrements(true);
 			q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
 			hits = searcher.Search(q, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length);
 			
-			try
+			// stop filter alone won't help, because query parser swallows the increments. 
+			qp.SetEnablePositionIncrements(false);
+			q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
+			hits = searcher.Search(q, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length);
+			
+			// when both qp and stopFilter propagate increments, we should find the doc.
+			qp = new QueryParser("field", new StopWhitespaceAnalyzer(true));
+			qp.SetEnablePositionIncrements(true);
+			q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
+			hits = searcher.Search(q, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
+		}
+		
+		/// <summary>
+		/// WhitespaceAnalyzer variant whose token stream drops the token "stop"
+		/// via a StopFilter; enablePositionIncrements controls whether the gap
+		/// left by the removed token is preserved as a position increment.
+		/// </summary>
+		private class StopWhitespaceAnalyzer:Analyzer
+		{
+			internal bool enablePositionIncrements;
+			internal WhitespaceAnalyzer a = new WhitespaceAnalyzer();
+			public StopWhitespaceAnalyzer(bool enablePositionIncrements)
 			{
-				// stop filter alone won't help, because query parser swallows the increments. 
-				qp.SetEnablePositionIncrements(false);
-				StopFilter.SetEnablePositionIncrementsDefault(true);
-				q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
-				hits = searcher.Search(q, null, 1000).scoreDocs;
-				Assert.AreEqual(0, hits.Length);
-				
-				// when both qp qnd stopFilter propagate increments, we should find the doc.
-				qp.SetEnablePositionIncrements(true);
-				q = (PhraseQuery) qp.Parse("\"1 stop 2\"");
-				hits = searcher.Search(q, null, 1000).scoreDocs;
-				Assert.AreEqual(1, hits.Length);
+				this.enablePositionIncrements = enablePositionIncrements;
 			}
-			finally
+			// Whitespace-tokenize, then remove "stop" with the configured
+			// position-increment behavior.
+			public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 			{
-				StopFilter.SetEnablePositionIncrementsDefault(dflt);
+				TokenStream ts = a.TokenStream(fieldName, reader);
+				return new StopFilter(enablePositionIncrements, ts, new System.String[]{"stop"});
 			}
 		}
 		
-		/// <summary> Basic analyzer behavior should be to keep sequential terms in one
-		/// increment from one another.
-		/// </summary>
 		[Test]
-		public virtual void  TestIncrementingPositions()
+		/// <summary>
+		/// Checks handling of a payload-bearing token whose first position
+		/// increment is 0 (token at position 0 minus 1): TermPositions, span
+		/// queries and PayloadSpanUtil must all see position 0. The x==1
+		/// iteration additionally exercises writer.SetAllowMinus1Position(),
+		/// for which only the first reported position is asserted.
+		/// </summary>
+		public virtual void  TestPayloadsPos0()
 		{
-			Analyzer analyzer = new WhitespaceAnalyzer();
-			TokenStream ts = analyzer.TokenStream("field", new System.IO.StringReader("one two three four five"));
-
-            Token reusableToken = new Token();
-			for (Token nextToken = ts.Next(reusableToken); nextToken != null; nextToken = ts.Next(reusableToken))
+			for (int x = 0; x < 2; x++)
+			{
+				Directory dir = new MockRAMDirectory();
+				IndexWriter writer = new IndexWriter(dir, new TestPayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+				if (x == 1)
+				{
+					writer.SetAllowMinus1Position();
+				}
+				Document doc = new Document();
+				// NOTE(review): StreamReader's string constructor treats its
+				// argument as a file path, but "a a b ..." is the literal token
+				// text; this likely should be System.IO.StringReader — confirm
+				// against the Java original of this test.
+				doc.Add(new Field("content", new System.IO.StreamReader("a a b c d e a f g h i j a b k k")));
+				writer.AddDocument(doc);
+				
+				IndexReader r = writer.GetReader();
+				
+				TermPositions tp = r.TermPositions(new Term("content", "a"));
+				int count = 0;
+				Assert.IsTrue(tp.Next());
+				// "a" occurs 4 times
+				Assert.AreEqual(4, tp.Freq());
+				int expected;
+				if (x == 1)
+				{
+					expected = System.Int32.MaxValue;
+				}
+				else
+				{
+					expected = 0;
+				}
+				Assert.AreEqual(expected, tp.NextPosition());
+				if (x == 1)
+				{
+					continue;
+				}
+				Assert.AreEqual(1, tp.NextPosition());
+				Assert.AreEqual(3, tp.NextPosition());
+				Assert.AreEqual(6, tp.NextPosition());
+				
+				// only one doc has "a"
+				Assert.IsFalse(tp.Next());
+				
+				IndexSearcher is_Renamed = new IndexSearcher(r);
+				
+				SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
+				SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
+				SpanQuery[] sqs = new SpanQuery[]{stq1, stq2};
+				SpanNearQuery snq = new SpanNearQuery(sqs, 30, false);
+				
+				count = 0;
+				bool sawZero = false;
+				//System.out.println("\ngetPayloadSpans test");
+				Lucene.Net.Search.Spans.Spans pspans = snq.GetSpans(is_Renamed.GetIndexReader());
+				while (pspans.Next())
+				{
+					//System.out.println(pspans.doc() + " - " + pspans.start() + " - "+ pspans.end());
+					System.Collections.ICollection payloads = pspans.GetPayload();
+					sawZero |= pspans.Start() == 0;
+					// Count every payload carried by the span; values themselves
+					// are not asserted here, only the total.
+					for (System.Collections.IEnumerator it = payloads.GetEnumerator(); it.MoveNext(); )
+					{
+						count++;
+						System.Object generatedAux2 = it.Current;
+						//System.out.println(new String((byte[]) it.next()));
+					}
+				}
+				Assert.AreEqual(5, count);
+				Assert.IsTrue(sawZero);
+				
+				//System.out.println("\ngetSpans test");
+				Lucene.Net.Search.Spans.Spans spans = snq.GetSpans(is_Renamed.GetIndexReader());
+				count = 0;
+				sawZero = false;
+				while (spans.Next())
+				{
+					count++;
+					sawZero |= spans.Start() == 0;
+					//System.out.println(spans.doc() + " - " + spans.start() + " - " + spans.end());
+				}
+				Assert.AreEqual(4, count);
+				Assert.IsTrue(sawZero);
+				
+				//System.out.println("\nPayloadSpanUtil test");
+				
+				// PayloadSpanUtil path: decode each payload back to its
+				// "pos: N" string and require that position 0 was seen.
+				sawZero = false;
+				PayloadSpanUtil psu = new PayloadSpanUtil(is_Renamed.GetIndexReader());
+				System.Collections.Generic.ICollection<byte[]> pls = psu.GetPayloadsForQuery(snq);
+				count = pls.Count;
+				for (System.Collections.IEnumerator it = pls.GetEnumerator(); it.MoveNext(); )
+				{
+					System.String s = new System.String(System.Text.UTF8Encoding.UTF8.GetChars((byte[]) it.Current));
+					//System.out.println(s);
+					sawZero |= s.Equals("pos: 0");
+				}
+				Assert.AreEqual(5, count);
+				Assert.IsTrue(sawZero);
+				writer.Close();
+				is_Renamed.GetIndexReader().Close();
+				dir.Close();
+			}
+		}
+	}
+	
+	/// <summary>
+	/// Analyzer used by TestPayloadsPos0: lower-case tokenization wrapped in a
+	/// PayloadFilter that attaches a "pos: N" payload to every token.
+	/// </summary>
+	class TestPayloadAnalyzer:Analyzer
+	{
+		
+		// Tokenize with LowerCaseTokenizer, then decorate each token with a
+		// payload and an alternating 0/1 position increment (see PayloadFilter).
+		public override TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
+		{
+			TokenStream result = new LowerCaseTokenizer(reader);
+			return new PayloadFilter(result, fieldName);
+		}
+	}
+	
+	/// <summary>
+	/// TokenFilter that stamps every token with a payload of the form
+	/// "pos: N" (N = current position) and alternates the position increment
+	/// between 0 (even-numbered tokens) and 1 (odd-numbered tokens), so the
+	/// very first token gets increment 0 — i.e. it lands at position 0 - 1.
+	/// </summary>
+	class PayloadFilter:TokenFilter
+	{
+		internal System.String fieldName;
+		
+		// Running token position (sum of the increments emitted so far).
+		internal int pos;
+		
+		// Index of the current token in the stream; parity picks the increment.
+		internal int i;
+		
+		internal PositionIncrementAttribute posIncrAttr;
+		internal PayloadAttribute payloadAttr;
+		internal TermAttribute termAttr;
+		
+		public PayloadFilter(TokenStream input, System.String fieldName):base(input)
+		{
+			this.fieldName = fieldName;
+			pos = 0;
+			i = 0;
+			posIncrAttr = (PositionIncrementAttribute) input.AddAttribute(typeof(PositionIncrementAttribute));
+			payloadAttr = (PayloadAttribute) input.AddAttribute(typeof(PayloadAttribute));
+			termAttr = (TermAttribute) input.AddAttribute(typeof(TermAttribute));
+		}
+		
+		// Pulls the next token from the wrapped stream, attaches the UTF-8
+		// "pos: N" payload, then overrides the position increment (1 for odd i,
+		// 0 for even i) and advances pos by that amount.
+		public override bool IncrementToken()
+		{
+			if (input.IncrementToken())
+			{
+				payloadAttr.SetPayload(new Payload(System.Text.UTF8Encoding.UTF8.GetBytes("pos: " + pos)));
+				int posIncr;
+				if (i % 2 == 1)
+				{
+					posIncr = 1;
+				}
+				else
+				{
+					posIncr = 0;
+				}
+				posIncrAttr.SetPositionIncrement(posIncr);
+				pos += posIncr;
+				// System.out.println("term=" + termAttr.term() + " pos=" + pos);
+				i++;
+				return true;
+			}
+			else
 			{
-				Assert.AreEqual(1, nextToken.GetPositionIncrement(), nextToken.TermText());
+				return false;
 			}
 		}
 	}

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestPositiveScoresOnlyCollector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPositiveScoresOnlyCollector.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPositiveScoresOnlyCollector.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPositiveScoresOnlyCollector.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,125 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	/// <summary>
+	/// Verifies that PositiveScoresOnlyCollector filters out documents whose
+	/// score is zero or negative before delegating to the wrapped collector.
+	/// </summary>
+	public class TestPositiveScoresOnlyCollector:LuceneTestCase
+	{
+		
+		// Fake scorer that walks the static scores array in document order;
+		// doc id == index into scores.
+		private sealed class SimpleScorer:Scorer
+		{
+			private int idx = - 1;
+			
+			public SimpleScorer():base(null)
+			{
+			}
+			
+			public override Explanation Explain(int doc)
+			{
+				return null;
+			}
+			
+			// NaN once the array is exhausted; otherwise the raw test score.
+			public override float Score()
+			{
+				return idx == Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores.Length?System.Single.NaN:Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores[idx];
+			}
+			
+			/// <deprecated> use {@link #DocID()} instead. 
+			/// </deprecated>
+			public override int Doc()
+			{
+				return idx;
+			}
+			
+			public override int DocID()
+			{
+				return idx;
+			}
+			
+			/// <deprecated> use {@link #NextDoc()} instead. 
+			/// </deprecated>
+			public override bool Next()
+			{
+				return NextDoc() != NO_MORE_DOCS;
+			}
+			
+			public override int NextDoc()
+			{
+				return ++idx != Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores.Length?idx:NO_MORE_DOCS;
+			}
+			
+			/// <deprecated> use {@link #Advance(int)} instead. 
+			/// </deprecated>
+			public override bool SkipTo(int target)
+			{
+				return Advance(target) != NO_MORE_DOCS;
+			}
+			
+			public override int Advance(int target)
+			{
+				idx = target;
+				return idx < Lucene.Net.Search.TestPositiveScoresOnlyCollector.scores.Length?idx:NO_MORE_DOCS;
+			}
+		}
+		
+		// The scores must have positive as well as negative values
+		private static readonly float[] scores = new float[]{0.7767749f, - 1.7839992f, 8.9925785f, 7.9608946f, - 0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f, - 8.108544f, 4.961808f, 2.2423935f, - 7.285586f, 4.6699767f};
+		
+        [Test]
+		public virtual void  TestNegativeScores()
+		{
+			
+			// The Top*Collectors previously filtered out documents with <= scores. This
+			// behavior has changed. This test checks that if PositiveOnlyScoresFilter
+			// wraps one of these collectors, documents with <= 0 scores are indeed
+			// filtered.
+			
+			int numPositiveScores = 0;
+			for (int i = 0; i < scores.Length; i++)
+			{
+				if (scores[i] > 0)
+				{
+					++numPositiveScores;
+				}
+			}
+			
+			Scorer s = new SimpleScorer();
+			TopDocsCollector tdc = TopScoreDocCollector.create(scores.Length, true);
+			Collector c = new PositiveScoresOnlyCollector(tdc);
+			c.SetScorer(s);
+			while (s.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
+			{
+				c.Collect(0);
+			}
+			TopDocs td = tdc.TopDocs();
+			ScoreDoc[] sd = td.scoreDocs;
+			// Only the strictly-positive scores may reach the wrapped collector.
+			Assert.AreEqual(numPositiveScores, td.totalHits);
+			for (int i = 0; i < sd.Length; i++)
+			{
+				Assert.IsTrue(sd[i].score > 0, "only positive scores should return: " + sd[i].score);
+			}
+		}
+	}
+}
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixFilter.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixFilter.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,12 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -33,11 +33,11 @@
 	/// <summary> Tests {@link PrefixFilter} class.
 	/// 
 	/// </summary>
-	[TestFixture]
-	public class TestPrefixFilter : LuceneTestCase
+    [TestFixture]
+	public class TestPrefixFilter:LuceneTestCase
 	{
 		[Test]
-		public virtual void  TestPrefixFilter_Renamed_Method()
+		public virtual void  TestPrefixFilter_Renamed()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestPrefixQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestPrefixQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,12 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -33,13 +33,11 @@
 	/// <summary> Tests {@link PrefixQuery} class.
 	/// 
 	/// </summary>
-	/// <author>  Erik Hatcher
-	/// </author>
-	[TestFixture]
-	public class TestPrefixQuery : LuceneTestCase
+    [TestFixture]
+	public class TestPrefixQuery:LuceneTestCase
 	{
 		[Test]
-		public virtual void  TestPrefixQuery_Renamed_Method()
+		public virtual void  TestPrefixQuery_Renamed()
 		{
 			RAMDirectory directory = new RAMDirectory();
 			
@@ -47,7 +45,7 @@
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			for (int i = 0; i < categories.Length; i++)
 			{
-				Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+				Document doc = new Document();
 				doc.Add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
 				writer.AddDocument(doc);
 			}

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestQueryTermVector.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryTermVector.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -24,10 +24,16 @@
 
 namespace Lucene.Net.Search
 {
-	[TestFixture]
-	public class TestQueryTermVector : LuceneTestCase
+	
+    [TestFixture]
+	public class TestQueryTermVector:LuceneTestCase
 	{
 		
+		
+		public TestQueryTermVector(System.String s):base(s)
+		{
+		}
+		
 		[Test]
 		public virtual void  TestConstructor()
 		{

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryWrapperFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestQueryWrapperFilter.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryWrapperFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestQueryWrapperFilter.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,77 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Index = Lucene.Net.Documents.Field.Index;
+using Store = Lucene.Net.Documents.Field.Store;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using Directory = Lucene.Net.Store.Directory;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using Occur = Lucene.Net.Search.BooleanClause.Occur;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	/// <summary>
+	/// Verifies that QueryWrapperFilter can wrap primitive, complex and
+	/// non-primitive queries without throwing, and still matches the one
+	/// indexed document in each case.
+	/// </summary>
+	public class TestQueryWrapperFilter:LuceneTestCase
+	{
+		
+        [Test]
+		public virtual void  TestBasic()
+		{
+			Directory dir = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			Document doc = new Document();
+			doc.Add(new Field("field", "value", Field.Store.NO, Field.Index.ANALYZED));
+			writer.AddDocument(doc);
+			writer.Close();
+			
+			TermQuery termQuery = new TermQuery(new Term("field", "value"));
+			
+			// should not throw exception with primitive query
+			QueryWrapperFilter qwf = new QueryWrapperFilter(termQuery);
+			
+			IndexSearcher searcher = new IndexSearcher(dir, true);
+			TopDocs hits = searcher.Search(new MatchAllDocsQuery(), qwf, 10);
+			Assert.AreEqual(1, hits.totalHits);
+			
+			// should not throw exception with complex primitive query
+			BooleanQuery booleanQuery = new BooleanQuery();
+			booleanQuery.Add(termQuery, Occur.MUST);
+			booleanQuery.Add(new TermQuery(new Term("field", "missing")), Occur.MUST_NOT);
+			// NOTE(review): booleanQuery is built above but never used — this
+			// line re-wraps termQuery, so the "complex primitive query" branch
+			// is not actually exercised; likely intended
+			// new QueryWrapperFilter(booleanQuery). Confirm against upstream.
+			qwf = new QueryWrapperFilter(termQuery);
+			
+			hits = searcher.Search(new MatchAllDocsQuery(), qwf, 10);
+			Assert.AreEqual(1, hits.totalHits);
+			
+			// should not throw exception with non primitive Query (doesn't implement
+			// Query#createWeight)
+			qwf = new QueryWrapperFilter(new FuzzyQuery(new Term("field", "valu")));
+			
+			hits = searcher.Search(new MatchAllDocsQuery(), qwf, 10);
+			Assert.AreEqual(1, hits.totalHits);
+		}
+	}
+}
\ No newline at end of file



Mime
View raw message