lucenenet-commits mailing list archives

From: aro...@apache.org
Subject: svn commit: r832486 [20/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...
Date: Tue, 03 Nov 2009 18:06:38 GMT
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestElevationComparator.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestElevationComparator.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestElevationComparator.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestElevationComparator.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,219 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using Lucene.Net.Index;
+using Lucene.Net.Store;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	public class TestElevationComparator:LuceneTestCase
+	{
+		
+		private System.Collections.IDictionary priority = new System.Collections.Hashtable();
+		
+		//@Test
+        [Test]
+		public virtual void  TestSorting()
+		{
+			Directory directory = new MockRAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			writer.SetMaxBufferedDocs(2);
+			writer.SetMergeFactor(1000);
+			writer.AddDocument(Adoc(new System.String[]{"id", "a", "title", "ipod", "str_s", "a"}));
+			writer.AddDocument(Adoc(new System.String[]{"id", "b", "title", "ipod ipod", "str_s", "b"}));
+			writer.AddDocument(Adoc(new System.String[]{"id", "c", "title", "ipod ipod ipod", "str_s", "c"}));
+			writer.AddDocument(Adoc(new System.String[]{"id", "x", "title", "boosted", "str_s", "x"}));
+			writer.AddDocument(Adoc(new System.String[]{"id", "y", "title", "boosted boosted", "str_s", "y"}));
+			writer.AddDocument(Adoc(new System.String[]{"id", "z", "title", "boosted boosted boosted", "str_s", "z"}));
+			
+			IndexReader r = writer.GetReader();
+			writer.Close();
+			
+			IndexSearcher searcher = new IndexSearcher(r);
+			
+			RunTest(searcher, true);
+			RunTest(searcher, false);
+			
+			searcher.Close();
+			r.Close();
+			directory.Close();
+		}
+		
+		private void  RunTest(IndexSearcher searcher, bool reversed)
+		{
+			
+			BooleanQuery newq = new BooleanQuery(false);
+			TermQuery query = new TermQuery(new Term("title", "ipod"));
+			
+			newq.Add(query, BooleanClause.Occur.SHOULD);
+			newq.Add(GetElevatedQuery(new System.String[]{"id", "a", "id", "x"}), BooleanClause.Occur.SHOULD);
+			
+			Sort sort = new Sort(new SortField[]{new SortField("id", new ElevationComparatorSource(priority), false), new SortField(null, SortField.SCORE, reversed)});
+			
+			TopDocsCollector topCollector = TopFieldCollector.create(sort, 50, false, true, true, true);
+			searcher.Search(newq, null, topCollector);
+			
+			TopDocs topDocs = topCollector.TopDocs(0, 10);
+			int nDocsReturned = topDocs.scoreDocs.Length;
+			
+			Assert.AreEqual(4, nDocsReturned);
+			
+			// 0 & 3 were elevated
+			Assert.AreEqual(0, topDocs.scoreDocs[0].doc);
+			Assert.AreEqual(3, topDocs.scoreDocs[1].doc);
+			
+			if (reversed)
+			{
+				Assert.AreEqual(2, topDocs.scoreDocs[2].doc);
+				Assert.AreEqual(1, topDocs.scoreDocs[3].doc);
+			}
+			else
+			{
+				Assert.AreEqual(1, topDocs.scoreDocs[2].doc);
+				Assert.AreEqual(2, topDocs.scoreDocs[3].doc);
+			}
+			
+			/*
+			for (int i = 0; i < nDocsReturned; i++) {
+			ScoreDoc scoreDoc = topDocs.scoreDocs[i];
+			ids[i] = scoreDoc.doc;
+			scores[i] = scoreDoc.score;
+			documents[i] = searcher.doc(ids[i]);
+			System.out.println("ids[i] = " + ids[i]);
+			System.out.println("documents[i] = " + documents[i]);
+			System.out.println("scores[i] = " + scores[i]);
+			}
+			*/
+		}
+		
+		private Query GetElevatedQuery(System.String[] vals)
+		{
+			BooleanQuery q = new BooleanQuery(false);
+			q.SetBoost(0);
+			int max = (vals.Length / 2) + 5;
+			for (int i = 0; i < vals.Length - 1; i += 2)
+			{
+				q.Add(new TermQuery(new Term(vals[i], vals[i + 1])), BooleanClause.Occur.SHOULD);
+				priority[vals[i + 1]] = (System.Int32) max--;
+				// System.out.println(" pri doc=" + vals[i+1] + " pri=" + (1+max));
+			}
+			return q;
+		}
+		
+		private Document Adoc(System.String[] vals)
+		{
+			Document doc = new Document();
+			for (int i = 0; i < vals.Length - 2; i += 2)
+			{
+				doc.Add(new Field(vals[i], vals[i + 1], Field.Store.YES, Field.Index.ANALYZED));
+			}
+			return doc;
+		}
+	}
+	
+	[Serializable]
+	class ElevationComparatorSource:FieldComparatorSource
+	{
+		private class AnonymousClassFieldComparator:FieldComparator
+		{
+			public AnonymousClassFieldComparator(int numHits, System.String fieldname, ElevationComparatorSource enclosingInstance)
+			{
+				InitBlock(numHits, fieldname, enclosingInstance);
+			}
+			private void  InitBlock(int numHits, System.String fieldname, ElevationComparatorSource enclosingInstance)
+			{
+				this.numHits = numHits;
+				this.fieldname = fieldname;
+				this.enclosingInstance = enclosingInstance;
+				values = new int[numHits];
+			}
+			private int numHits;
+			private System.String fieldname;
+			private ElevationComparatorSource enclosingInstance;
+			public ElevationComparatorSource Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			
+			internal Lucene.Net.Search.StringIndex idIndex;
+			private int[] values;
+			internal int bottomVal;
+			
+			public override int Compare(int slot1, int slot2)
+			{
+				return values[slot2] - values[slot1]; // values will be small enough that there is no overflow concern
+			}
+			
+			public override void  SetBottom(int slot)
+			{
+				bottomVal = values[slot];
+			}
+			
+			private int DocVal(int doc)
+			{
+				System.String id = idIndex.lookup[idIndex.order[doc]];
+				// look up the boxed priority first and null-check before unboxing;
+				// casting a missing (null) entry straight to System.Int32 would throw
+				System.Object prio = Enclosing_Instance.priority[id];
+				return prio == null ? 0 : (System.Int32) prio;
+			}
+			
+			public override int CompareBottom(int doc)
+			{
+				return DocVal(doc) - bottomVal;
+			}
+			
+			public override void  Copy(int slot, int doc)
+			{
+				values[slot] = DocVal(doc);
+			}
+			
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				idIndex = Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetStringIndex(reader, fieldname);
+			}
+			
+			public override System.IComparable Value(int slot)
+			{
+				return (System.Int32) values[slot];
+			}
+		}
+		private System.Collections.IDictionary priority;
+		
+		public ElevationComparatorSource(System.Collections.IDictionary boosts)
+		{
+			this.priority = boosts;
+		}
+		
+		public override FieldComparator NewComparator(System.String fieldname, int numHits, int sortPos, bool reversed)
+		{
+			return new AnonymousClassFieldComparator(numHits, fieldname, this);
+		}
+	}
+}
\ No newline at end of file
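
A minimal sketch of the elevation pattern this test exercises, using only the API shown above; the priority table and query below are illustrative, not part of the commit. Documents whose "id" value appears in the table sort first (higher value first, per Compare above), with relevance score as the tie-breaker:

    // Illustrative priority table; higher values sort earlier.
    System.Collections.IDictionary priority = new System.Collections.Hashtable();
    priority["a"] = 2;
    priority["x"] = 1;

    // Primary sort key: elevation comparator on "id"; secondary key: score.
    Sort sort = new Sort(new SortField[] {
        new SortField("id", new ElevationComparatorSource(priority), false),
        new SortField(null, SortField.SCORE, false)
    });

    TopDocsCollector collector = TopFieldCollector.create(sort, 50, false, true, true, true);
    searcher.Search(query, null, collector);
    TopDocs top = collector.TopDocs(0, 10); // docs with elevated ids lead the results

Note that in RunTest above the elevated documents are also OR'ed into the BooleanQuery (with boost 0, via GetElevatedQuery) so that they match the search without disturbing its scores.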

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestExplanations.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestExplanations.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,55 +19,55 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using ParseException = Lucene.Net.QueryParsers.ParseException;
 using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using Lucene.Net.Search.Spans;
+using SpanFirstQuery = Lucene.Net.Search.Spans.SpanFirstQuery;
+using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
+using SpanNotQuery = Lucene.Net.Search.Spans.SpanNotQuery;
+using SpanOrQuery = Lucene.Net.Search.Spans.SpanOrQuery;
+using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
 
 namespace Lucene.Net.Search
 {
 	
-	/// <summary> Tests primative queries (ie: that rewrite to themselves) to
+	/// <summary> Tests primitive queries (ie: that rewrite to themselves) to
 	/// ensure they match the expected set of docs, and that the score of each
 	/// match is equal to the value of the score's explanation.
 	/// 
 	/// <p>
-	/// The assumption is that if all of the "primative" queries work well,
-	/// then anythingthat rewrites to a primative will work well also.
+	/// The assumption is that if all of the "primitive" queries work well,
+	/// then anything that rewrites to a primitive will work well also.
 	/// </p>
 	/// 
 	/// </summary>
 	/// <seealso cref="Subclasses for actual tests">
 	/// </seealso>
-	[TestFixture]
-	public class TestExplanations : LuceneTestCase
+    [TestFixture]
+	public class TestExplanations:LuceneTestCase
 	{
 		protected internal IndexSearcher searcher;
 		
+		public const System.String KEY = "KEY";
 		public const System.String FIELD = "field";
-		public static readonly Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser(FIELD, new WhitespaceAnalyzer());
+		public static readonly QueryParser qp = new QueryParser(FIELD, new WhitespaceAnalyzer());
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			base.TearDown();
-			if (searcher != null)
-			{
-				searcher.Close();
-				searcher = null;
-			}
+			searcher.Close();
 		}
 		
 		[SetUp]
-		public override void SetUp()
+		public override void  SetUp()
 		{
 			base.SetUp();
 			RAMDirectory directory = new RAMDirectory();
@@ -75,6 +75,7 @@
 			for (int i = 0; i < docFields.Length; i++)
 			{
 				Document doc = new Document();
+				doc.Add(new Field(KEY, "" + i, Field.Store.NO, Field.Index.NOT_ANALYZED));
 				doc.Add(new Field(FIELD, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
 				writer.AddDocument(doc);
 			}
@@ -102,11 +103,11 @@
 		}
 		
 		/// <summary> Tests a query using qtest after wrapping it with both optB and reqB</summary>
-		/// <seealso cref="Qtest">
+		/// <seealso cref="qtest">
 		/// </seealso>
-		/// <seealso cref="ReqB">
+		/// <seealso cref="reqB">
 		/// </seealso>
-		/// <seealso cref="OptB">
+		/// <seealso cref="optB">
 		/// </seealso>
 		public virtual void  Bqtest(Query q, int[] expDocNrs)
 		{
@@ -114,45 +115,36 @@
 			Qtest(OptB(q), expDocNrs);
 		}
 		/// <summary> Tests a query using qtest after wrapping it with both optB and reqB</summary>
-		/// <seealso cref="Qtest">
+		/// <seealso cref="qtest">
 		/// </seealso>
-		/// <seealso cref="ReqB">
+		/// <seealso cref="reqB">
 		/// </seealso>
-		/// <seealso cref="OptB">
+		/// <seealso cref="optB">
 		/// </seealso>
 		public virtual void  Bqtest(System.String queryText, int[] expDocNrs)
 		{
 			Bqtest(MakeQuery(queryText), expDocNrs);
 		}
 		
-		/// <summary>A filter that only lets the specified document numbers pass </summary>
+		/// <summary> Convenience subclass of FieldCacheTermsFilter</summary>
 		[Serializable]
-		public class ItemizedFilter : Filter
+		public class ItemizedFilter:FieldCacheTermsFilter
 		{
-			internal int[] docs;
-			public ItemizedFilter(int[] docs)
+			private static System.String[] int2str(int[] terms)
+			{
+				System.String[] out_Renamed = new System.String[terms.Length];
+				for (int i = 0; i < terms.Length; i++)
+				{
+					out_Renamed[i] = "" + terms[i];
+				}
+				return out_Renamed;
+			}
+			public ItemizedFilter(System.String keyField, int[] keys):base(keyField, int2str(keys))
+			{
+			}
+			public ItemizedFilter(int[] keys):base(Lucene.Net.Search.TestExplanations.KEY, int2str(keys))
 			{
-				this.docs = docs;
 			}
-            public override DocIdSet GetDocIdSet(IndexReader r)
-            {
-                System.Collections.BitArray b = new System.Collections.BitArray((r.MaxDoc() % 64 == 0 ? r.MaxDoc() / 64 : r.MaxDoc() / 64 + 1) * 64);
-                for (int i = 0; i < docs.Length; i++)
-                {
-                    b.Set(docs[i], true);
-                }
-                return new DocIdBitSet(b);
-            }
-            [System.Obsolete()]
-            public override System.Collections.BitArray Bits(IndexReader r)
-            {
-                System.Collections.BitArray b = new System.Collections.BitArray((r.MaxDoc() % 64 == 0 ? r.MaxDoc() / 64 : r.MaxDoc() / 64 + 1) * 64);
-                for (int i = 0; i < docs.Length; i++)
-                {
-                    b.Set(docs[i], true);
-                }
-                return b;
-            }
 		}
 		
 		/// <summary>helper for generating MultiPhraseQueries </summary>
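
The reworked ItemizedFilter above no longer flips bits by raw document number; each document is now indexed with an untokenized KEY field holding its ordinal, and the FieldCacheTermsFilter base class matches on those key terms. A minimal sketch of the same idea, assuming an index built as in SetUp above (the loop variable i and the query are illustrative):

    // Index time (as in SetUp): store each document's ordinal as a key term.
    Document doc = new Document();
    doc.Add(new Field(TestExplanations.KEY, "" + i, Field.Store.NO, Field.Index.NOT_ANALYZED));
    writer.AddDocument(doc);

    // Search time: only documents 2 and 5 pass the filter.
    Filter f = new TestExplanations.ItemizedFilter(new int[] { 2, 5 });
    TopDocs hits = searcher.Search(query, f, 10);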

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCache.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFieldCache.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCache.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCache.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,152 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	public class TestFieldCache:LuceneTestCase
+	{
+		protected internal IndexReader reader;
+		private const int NUM_DOCS = 1000;
+		
+		public TestFieldCache(System.String s):base(s)
+		{
+		}
+		
+		[SetUp]
+		public override void  SetUp()
+		{
+			base.SetUp();
+			RAMDirectory directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			long theLong = System.Int64.MaxValue;
+			double theDouble = System.Double.MaxValue;
+			byte theByte = (byte) System.SByte.MaxValue;
+			short theShort = System.Int16.MaxValue;
+			int theInt = System.Int32.MaxValue;
+			float theFloat = System.Single.MaxValue;
+			for (int i = 0; i < NUM_DOCS; i++)
+			{
+				Document doc = new Document();
+				doc.Add(new Field("theLong", System.Convert.ToString(theLong--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("theDouble", System.Convert.ToString(theDouble--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("theByte", System.Convert.ToString((byte) theByte--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("theShort", System.Convert.ToString(theShort--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("theInt", System.Convert.ToString(theInt--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				doc.Add(new Field("theFloat", System.Convert.ToString(theFloat--), Field.Store.NO, Field.Index.NOT_ANALYZED));
+				writer.AddDocument(doc);
+			}
+			writer.Close();
+			reader = IndexReader.Open(directory);
+		}
+		
+		[Test]
+		public virtual void  TestInfoStream()
+		{
+			try
+			{
+				FieldCache cache = Lucene.Net.Search.FieldCache_Fields.DEFAULT;
+				System.IO.MemoryStream bos = new System.IO.MemoryStream(1024);
+				cache.SetInfoStream(new System.IO.StreamWriter(bos));
+				double[] doubles = cache.GetDoubles(reader, "theDouble");
+				float[] floats = cache.GetFloats(reader, "theDouble");
+				char[] tmpChar;
+				byte[] tmpByte;
+				tmpByte = bos.GetBuffer();
+				tmpChar = new char[bos.Length];
+				System.Array.Copy(tmpByte, 0, tmpChar, 0, tmpChar.Length);
+				Assert.IsTrue(new System.String(tmpChar).IndexOf("WARNING") != - 1);
+			}
+			finally
+			{
+				Lucene.Net.Search.FieldCache_Fields.DEFAULT.PurgeAllCaches();
+			}
+		}
+		
+		[Test]
+		public virtual void  Test()
+		{
+			FieldCache cache = Lucene.Net.Search.FieldCache_Fields.DEFAULT;
+			double[] doubles = cache.GetDoubles(reader, "theDouble");
+			Assert.AreSame(doubles, cache.GetDoubles(reader, "theDouble"), "Second request to cache return same array");
+			Assert.AreSame(doubles, cache.GetDoubles(reader, "theDouble", Lucene.Net.Search.FieldCache_Fields.DEFAULT_DOUBLE_PARSER), "Second request with explicit parser return same array");
+			Assert.IsTrue(doubles.Length == NUM_DOCS, "doubles Size: " + doubles.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < doubles.Length; i++)
+			{
+				Assert.IsTrue(doubles[i] == (System.Double.MaxValue - i), doubles[i] + " does not equal: " + (System.Double.MaxValue - i));
+			}
+			
+			long[] longs = cache.GetLongs(reader, "theLong");
+			Assert.AreSame(longs, cache.GetLongs(reader, "theLong"), "Second request to cache return same array");
+			Assert.AreSame(longs, cache.GetLongs(reader, "theLong", Lucene.Net.Search.FieldCache_Fields.DEFAULT_LONG_PARSER), "Second request with explicit parser return same array");
+			Assert.IsTrue(longs.Length == NUM_DOCS, "longs Size: " + longs.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < longs.Length; i++)
+			{
+				Assert.IsTrue(longs[i] == (System.Int64.MaxValue - i), longs[i] + " does not equal: " + (System.Int64.MaxValue - i));
+			}
+			
+			byte[] bytes = cache.GetBytes(reader, "theByte");
+			Assert.AreSame(bytes, cache.GetBytes(reader, "theByte"), "Second request to cache return same array");
+			Assert.AreSame(bytes, cache.GetBytes(reader, "theByte", Lucene.Net.Search.FieldCache_Fields.DEFAULT_BYTE_PARSER), "Second request with explicit parser return same array");
+			Assert.IsTrue(bytes.Length == NUM_DOCS, "bytes Size: " + bytes.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < bytes.Length; i++)
+			{
+				Assert.IsTrue(bytes[i] == (byte) ((byte) System.SByte.MaxValue - i), bytes[i] + " does not equal: " + ((byte) System.SByte.MaxValue - i));
+			}
+			
+			short[] shorts = cache.GetShorts(reader, "theShort");
+			Assert.AreSame(shorts, cache.GetShorts(reader, "theShort"), "Second request to cache return same array");
+			Assert.AreSame(shorts, cache.GetShorts(reader, "theShort", Lucene.Net.Search.FieldCache_Fields.DEFAULT_SHORT_PARSER), "Second request with explicit parser return same array");
+			Assert.IsTrue(shorts.Length == NUM_DOCS, "shorts Size: " + shorts.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < shorts.Length; i++)
+			{
+				Assert.IsTrue(shorts[i] == (short) (System.Int16.MaxValue - i), shorts[i] + " does not equal: " + (System.Int16.MaxValue - i));
+			}
+			
+			int[] ints = cache.GetInts(reader, "theInt");
+			Assert.AreSame(ints, cache.GetInts(reader, "theInt"), "Second request to cache return same array");
+			Assert.AreSame(ints, cache.GetInts(reader, "theInt", Lucene.Net.Search.FieldCache_Fields.DEFAULT_INT_PARSER), "Second request with explicit parser return same array");
+			Assert.IsTrue(ints.Length == NUM_DOCS, "ints Size: " + ints.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < ints.Length; i++)
+			{
+				Assert.IsTrue(ints[i] == (System.Int32.MaxValue - i), ints[i] + " does not equal: " + (System.Int32.MaxValue - i));
+			}
+			
+			float[] floats = cache.GetFloats(reader, "theFloat");
+			Assert.AreSame(floats, cache.GetFloats(reader, "theFloat"), "Second request to cache return same array");
+			Assert.AreSame(floats, cache.GetFloats(reader, "theFloat", Lucene.Net.Search.FieldCache_Fields.DEFAULT_FLOAT_PARSER), "Second request with explicit parser return same array");
+			Assert.IsTrue(floats.Length == NUM_DOCS, "floats Size: " + floats.Length + " is not: " + NUM_DOCS);
+			for (int i = 0; i < floats.Length; i++)
+			{
+				Assert.IsTrue(floats[i] == (System.Single.MaxValue - i), floats[i] + " does not equal: " + (System.Single.MaxValue - i));
+			}
+		}
+	}
+}
\ No newline at end of file
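
As the AreSame assertions above demonstrate, the FieldCache uninverts a field once per reader and returns the identical array on every subsequent request, so callers can hold the array and index it by document number. A short sketch against the same API (field name as in SetUp above):

    // The first call uninverts "theInt" for this reader; later calls return the cached array.
    FieldCache cache = Lucene.Net.Search.FieldCache_Fields.DEFAULT;
    int[] ints = cache.GetInts(reader, "theInt");
    int[] again = cache.GetInts(reader, "theInt");
    bool sameArray = System.Object.ReferenceEquals(ints, again); // true: one array per reader/field
    int seventhDoc = ints[7]; // one entry per document number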

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCacheRangeFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFieldCacheRangeFilter.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCacheRangeFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCacheRangeFilter.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,551 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> A basic 'positive' unit test class for the FieldCacheRangeFilter class.
+	/// 
+	/// <p>
+	/// NOTE: at the moment, this class only tests for 'positive' results;
+	/// it does not verify the results to ensure there are no 'false positives',
+	/// nor does it adequately test 'negative' results.  It also does not test
+	/// that garbage input results in an Exception.
+	/// </summary>
+    [TestFixture]
+	public class TestFieldCacheRangeFilter:BaseTestRangeFilter
+	{
+		
+		public TestFieldCacheRangeFilter(System.String name):base(name)
+		{
+		}
+		public TestFieldCacheRangeFilter():base()
+		{
+		}
+		
+        [Test]
+		public virtual void  TestRangeFilterId()
+		{
+			
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			int medId = ((maxId - minId) / 2);
+			
+			System.String minIP = Pad(minId);
+			System.String maxIP = Pad(maxId);
+			System.String medIP = Pad(medId);
+			
+			int numDocs = reader.NumDocs();
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test id, bounded on both ends
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, maxIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, maxIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + maxId - medId, result.Length, "med and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, medIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + medId - minId, result.Length, "up to med");
+			
+			// unbounded id
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, null, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "min and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, maxIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "max and down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, maxIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, maxIP, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(maxId - medId, result.Length, "med and up, not max");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, medIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(medId - minId, result.Length, "not min, up to med");
+			
+			// very small sets
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, minIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "min,min,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, medIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "med,med,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, maxIP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "max,max,F,F");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", minIP, minIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "min,min,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", null, minIP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "nul,min,F,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, maxIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,max,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", maxIP, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,nul,T,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("id", medIP, medIP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "med,med,T,T");
+		}
+		
+        [Test]
+		public virtual void  TestFieldCacheRangeFilterRand()
+		{
+			
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			System.String minRP = Pad(signedIndex.minR);
+			System.String maxRP = Pad(signedIndex.maxR);
+			
+			int numDocs = reader.NumDocs();
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test extremes, bounded on both ends
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but biggest");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but smallest");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, maxRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 2, result.Length, "all but extremes");
+			
+			// unbounded
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "smallest and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, maxRP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "biggest and down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not smallest, but up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, maxRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not biggest, but down");
+			
+			// very small sets
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, minRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "min,min,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, maxRP, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "max,max,F,F");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", minRP, minRP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "min,min,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", null, minRP, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "nul,min,F,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, maxRP, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,max,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewStringRange("rand", maxRP, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,nul,T,T");
+		}
+		
+		// byte ranges cannot be tested because all of the test ranges are too big for bytes; an extra, smaller range would be needed for that
+		
+        [Test]
+		public virtual void  TestFieldCacheRangeFilterShorts()
+		{
+			
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			int numDocs = reader.NumDocs();
+			int medId = ((maxId - minId) / 2);
+			System.Int16 minIdO = (short) minId;
+			System.Int16 maxIdO = (short) maxId;
+			System.Int16 medIdO = (short) medId;
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test id, bounded on both ends
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, maxIdO, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, maxIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", medIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + maxId - medId, result.Length, "med and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + medId - minId, result.Length, "up to med");
+			
+			// unbounded id
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", null, null, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "min and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", null, maxIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "max and down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", null, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", medIdO, maxIdO, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(maxId - medId, result.Length, "med and up, not max");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, medIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(medId - minId, result.Length, "not min, up to med");
+			
+			// very small sets
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, minIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "min,min,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", medIdO, medIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "med,med,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", maxIdO, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "max,max,F,F");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", minIdO, minIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "min,min,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", null, minIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "nul,min,F,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", maxIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,max,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", maxIdO, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,nul,T,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", medIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "med,med,T,T");
+			
+			// special cases
+			System.Int16 tempAux = (short) System.Int16.MaxValue;
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", tempAux, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "overflow special case");
+			System.Int16 tempAux2 = (short) System.Int16.MinValue;
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", null, tempAux2, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "overflow special case");
+			result = search.Search(q, FieldCacheRangeFilter.NewShortRange("id", maxIdO, minIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "inverse range");
+		}
+		
+        [Test]
+		public virtual void  TestFieldCacheRangeFilterInts()
+		{
+			
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			int numDocs = reader.NumDocs();
+			int medId = ((maxId - minId) / 2);
+			System.Int32 minIdO = (System.Int32) minId;
+			System.Int32 maxIdO = (System.Int32) maxId;
+			System.Int32 medIdO = (System.Int32) medId;
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test id, bounded on both ends
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, maxIdO, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, maxIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", medIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + maxId - medId, result.Length, "med and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + medId - minId, result.Length, "up to med");
+			
+			// unbounded id
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", null, null, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "min and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", null, maxIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "max and down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", null, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", medIdO, maxIdO, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(maxId - medId, result.Length, "med and up, not max");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, medIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(medId - minId, result.Length, "not min, up to med");
+			
+			// very small sets
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, minIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "min,min,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", medIdO, medIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "med,med,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", maxIdO, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "max,max,F,F");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", minIdO, minIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "min,min,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", null, minIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "nul,min,F,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", maxIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,max,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", maxIdO, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,nul,T,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", medIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "med,med,T,T");
+			
+			// special cases
+			System.Int32 tempAux = (System.Int32) System.Int32.MaxValue;
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", tempAux, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "overflow special case");
+			System.Int32 tempAux2 = (System.Int32) System.Int32.MinValue;
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", null, tempAux2, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "overflow special case");
+			result = search.Search(q, FieldCacheRangeFilter.NewIntRange("id", maxIdO, minIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "inverse range");
+		}
+		
+        [Test]
+		public virtual void  TestFieldCacheRangeFilterLongs()
+		{
+			
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			int numDocs = reader.NumDocs();
+			int medId = ((maxId - minId) / 2);
+			System.Int64 minIdO = (long) minId;
+			System.Int64 maxIdO = (long) maxId;
+			System.Int64 medIdO = (long) medId;
+			
+			Assert.AreEqual(numDocs, 1 + maxId - minId, "num of docs");
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			// test id, bounded on both ends
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, maxIdO, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but last");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, maxIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "all but first");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 2, result.Length, "all but ends");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", medIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + maxId - medId, result.Length, "med and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1 + medId - minId, result.Length, "up to med");
+			
+			// unbounded id
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", null, null, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "min and up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", null, maxIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "max and down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not min, but up");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", null, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs - 1, result.Length, "not max, but down");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", medIdO, maxIdO, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(maxId - medId, result.Length, "med and up, not max");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, medIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(medId - minId, result.Length, "not min, up to med");
+			
+			// very small sets
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, minIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "min,min,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", medIdO, medIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "med,med,F,F");
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", maxIdO, maxIdO, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "max,max,F,F");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", minIdO, minIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "min,min,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", null, minIdO, F, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "nul,min,F,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", maxIdO, maxIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,max,T,T");
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", maxIdO, null, T, F), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "max,nul,T,T");
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", medIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(1, result.Length, "med,med,T,T");
+			
+			// special cases
+			System.Int64 tempAux = (long) System.Int64.MaxValue;
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", tempAux, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "overflow special case");
+			System.Int64 tempAux2 = (long) System.Int64.MinValue;
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", null, tempAux2, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "overflow special case");
+			result = search.Search(q, FieldCacheRangeFilter.NewLongRange("id", maxIdO, minIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "inverse range");
+		}
+		
+		// float and double tests are a bit minimalistic; doing more is complicated because of the limited precision of those types
+		
+        [Test]
+		public virtual void  TestFieldCacheRangeFilterFloats()
+		{
+			
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			int numDocs = reader.NumDocs();
+			System.Single minIdO = (float) (minId + .5f);
+			System.Single medIdO = (float) ((float) minIdO + ((float) (maxId - minId)) / 2.0f);
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewFloatRange("id", minIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs / 2, result.Length, "find all");
+			int count = 0;
+			result = search.Search(q, FieldCacheRangeFilter.NewFloatRange("id", null, medIdO, F, T), numDocs).scoreDocs;
+			count += result.Length;
+			result = search.Search(q, FieldCacheRangeFilter.NewFloatRange("id", medIdO, null, F, F), numDocs).scoreDocs;
+			count += result.Length;
+			Assert.AreEqual(numDocs, count, "sum of two concatenated ranges");
+			result = search.Search(q, FieldCacheRangeFilter.NewFloatRange("id", null, null, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			System.Single tempAux = (float) System.Single.PositiveInfinity;
+			result = search.Search(q, FieldCacheRangeFilter.NewFloatRange("id", tempAux, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "infinity special case");
+			System.Single tempAux2 = (float) System.Single.NegativeInfinity;
+			result = search.Search(q, FieldCacheRangeFilter.NewFloatRange("id", null, tempAux2, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "infinity special case");
+		}
+		
+        [Test]
+		public virtual void  TestFieldCacheRangeFilterDoubles()
+		{
+			
+			IndexReader reader = IndexReader.Open(signedIndex.index);
+			IndexSearcher search = new IndexSearcher(reader);
+			
+			int numDocs = reader.NumDocs();
+			System.Double minIdO = (double) (minId + .5);
+			System.Double medIdO = (double) ((float) minIdO + ((double) (maxId - minId)) / 2.0);
+			
+			ScoreDoc[] result;
+			Query q = new TermQuery(new Term("body", "body"));
+			
+			result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", minIdO, medIdO, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs / 2, result.Length, "find all");
+			int count = 0;
+			result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, medIdO, F, T), numDocs).scoreDocs;
+			count += result.Length;
+			result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", medIdO, null, F, F), numDocs).scoreDocs;
+			count += result.Length;
+			Assert.AreEqual(numDocs, count, "sum of two concatenated ranges");
+			result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, null, T, T), numDocs).scoreDocs;
+			Assert.AreEqual(numDocs, result.Length, "find all");
+			System.Double tempAux = (double) System.Double.PositiveInfinity;
+			result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", tempAux, null, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "infinity special case");
+			System.Double tempAux2 = (double) System.Double.NegativeInfinity;
+			result = search.Search(q, FieldCacheRangeFilter.NewDoubleRange("id", null, tempAux2, F, F), numDocs).scoreDocs;
+			Assert.AreEqual(0, result.Length, "infinity special case");
+		}
+	}
+}
\ No newline at end of file
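
For orientation, the factory methods exercised above build range filters over uninverted field values, and the two trailing booleans are includeLower/includeUpper, matching the T/F shorthand in the assertions. A minimal sketch (field name and bounds are illustrative):

    // Documents whose cached integer "id" lies in [10, 20) pass; a null bound is open-ended.
    Filter byInt = FieldCacheRangeFilter.NewIntRange("id", (System.Int32) 10, (System.Int32) 20, true, false);
    Filter upward = FieldCacheRangeFilter.NewIntRange("id", (System.Int32) 100, null, false, false);
    TopDocs hits = searcher.Search(new MatchAllDocsQuery(), byInt, 100);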

Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCacheTermsFilter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFieldCacheTermsFilter.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCacheTermsFilter.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFieldCacheTermsFilter.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,83 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using KeywordAnalyzer = Lucene.Net.Analysis.KeywordAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+	/// <summary> A basic unit test for FieldCacheTermsFilter
+	/// 
+	/// </summary>
+	/// <seealso cref="Lucene.Net.Search.FieldCacheTermsFilter">
+	/// </seealso>
+    [TestFixture]
+	public class TestFieldCacheTermsFilter:LuceneTestCase
+	{
+        [Test]
+		public virtual void  TestMissingTerms()
+		{
+			System.String fieldName = "field1";
+			MockRAMDirectory rd = new MockRAMDirectory();
+			IndexWriter w = new IndexWriter(rd, new KeywordAnalyzer(), MaxFieldLength.UNLIMITED);
+			for (int i = 0; i < 100; i++)
+			{
+				Document doc = new Document();
+				int term = i * 10; // terms are multiples of 10
+				doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.NOT_ANALYZED));
+				w.AddDocument(doc);
+			}
+			w.Close();
+			
+			IndexReader reader = IndexReader.Open(rd);
+			IndexSearcher searcher = new IndexSearcher(reader);
+			int numDocs = reader.NumDocs();
+			ScoreDoc[] results;
+			MatchAllDocsQuery q = new MatchAllDocsQuery();
+			
+			System.Collections.ArrayList terms = new System.Collections.ArrayList();
+			terms.Add("5");
+			results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, (System.String[]) terms.ToArray(typeof(System.String))), numDocs).scoreDocs;
+			Assert.AreEqual(0, results.Length, "Must match nothing");
+			
+			terms = new System.Collections.ArrayList();
+			terms.Add("10");
+            results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, (System.String[])terms.ToArray(typeof(System.String))), numDocs).scoreDocs;
+			Assert.AreEqual(1, results.Length, "Must match 1");
+			
+			terms = new System.Collections.ArrayList();
+			terms.Add("10");
+			terms.Add("20");
+			results = searcher.Search(q, new FieldCacheTermsFilter(fieldName, (System.String[]) terms.ToArray(typeof(System.String))), numDocs).scoreDocs;
+			Assert.AreEqual(2, results.Length, "Must match 2");
+			
+			reader.Close();
+			rd.Close();
+		}
+	}
+}
\ No newline at end of file
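
A closing sketch of the filter under test: FieldCacheTermsFilter accepts a document when its single, untokenized term for the field equals one of the supplied strings (field name and terms below are illustrative):

    // Passes only documents whose "field1" term is exactly "10" or "20".
    Filter f = new FieldCacheTermsFilter("field1", new System.String[] { "10", "20" });
    TopDocs hits = searcher.Search(new MatchAllDocsQuery(), f, 100);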

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFilteredQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,16 +19,16 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
 using Occur = Lucene.Net.Search.BooleanClause.Occur;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
-using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
 
 namespace Lucene.Net.Search
 {
@@ -39,53 +39,32 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Id: TestFilteredQuery.java 587050 2007-10-22 09:58:48Z doronc $
+	/// <version>  $Id: TestFilteredQuery.java 807821 2009-08-25 21:55:49Z mikemccand $
 	/// </version>
 	/// <since>   1.4
 	/// </since>
-	[TestFixture]
-	public class TestFilteredQuery : LuceneTestCase
+    [TestFixture]
+	public class TestFilteredQuery:LuceneTestCase
 	{
 		[Serializable]
-		private class AnonymousClassFilter : Filter
+		private class AnonymousClassFilter:Filter
 		{
-            public override DocIdSet GetDocIdSet(IndexReader reader)
-            {
-                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
-                bitset.Set(1, true);
-                bitset.Set(3, true);
-                return new DocIdBitSet(bitset);
-            }
-            [System.Obsolete()]
-            public override System.Collections.BitArray Bits(IndexReader reader)
+			public override DocIdSet GetDocIdSet(IndexReader reader)
 			{
-                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
+				System.Collections.BitArray bitset = new System.Collections.BitArray((5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64);
 				bitset.Set(1, true);
 				bitset.Set(3, true);
-				return bitset;
+				return new DocIdBitSet(bitset);
 			}
 		}
 		[Serializable]
-		private class AnonymousClassFilter1 : Filter
+		private class AnonymousClassFilter1:Filter
 		{
-            public override DocIdSet GetDocIdSet(IndexReader reader)
-            {
-                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
-                for (int i = 0; i < 5; i++)
-                {
-                    bitset.Set(i, true);
-                }
-                return new DocIdBitSet(bitset);
-            }
-            [System.Obsolete()]
-            public override System.Collections.BitArray Bits(IndexReader reader)
+			public override DocIdSet GetDocIdSet(IndexReader reader)
 			{
-                System.Collections.BitArray bitset = new System.Collections.BitArray(64/*(5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64*/);
-				for (int i = 0; i < 5; i++)
-				{
-					bitset.Set(i, true);
-				} 
-				return bitset;
+				System.Collections.BitArray bitset = new System.Collections.BitArray((5 % 64 == 0 ? 5 / 64 : 5 / 64 + 1) * 64);
+				for (int i = 0; i < 5; i++) bitset.Set(i, true);
+				return new DocIdBitSet(bitset);
 			}
 		}
 		
@@ -94,28 +73,29 @@
 		private Query query;
 		private Filter filter;
 		
-		[SetUp]
-		public override void SetUp()
+		[SetUp]
+		public override void  SetUp()
 		{
+			base.SetUp();
 			directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("sorter", "b", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
-			doc = new Lucene.Net.Documents.Document();
+			doc = new Document();
 			doc.Add(new Field("field", "one two three four", Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("sorter", "d", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
-			doc = new Lucene.Net.Documents.Document();
+			doc = new Document();
 			doc.Add(new Field("field", "one two three y", Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("sorter", "a", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 			
-			doc = new Lucene.Net.Documents.Document();
+			doc = new Document();
 			doc.Add(new Field("field", "one two x", Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("sorter", "c", Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
@@ -125,7 +105,7 @@
 			
 			searcher = new IndexSearcher(directory);
 			query = new TermQuery(new Term("field", "three"));
-			filter = new AnonymousClassFilter();
+			filter = NewStaticFilterB();
 		}
 		
 		// must be static for serialization tests
@@ -135,14 +115,15 @@
 		}
 		
 		[TearDown]
-		public override void TearDown()
+		public override void  TearDown()
 		{
 			searcher.Close();
 			directory.Close();
+			base.TearDown();
 		}
 		
 		[Test]
-		public virtual void  TestFilteredQuery_Renamed_Method()
+		public virtual void  TestFilteredQuery_Renamed()
 		{
 			Query filteredquery = new FilteredQuery(query, filter);
 			ScoreDoc[] hits = searcher.Search(filteredquery, null, 1000).scoreDocs;
@@ -216,25 +197,39 @@
 		[Test]
 		public virtual void  TestRangeQuery()
 		{
-			RangeQuery rq = new RangeQuery(new Term("sorter", "b"), new Term("sorter", "d"), true);
+			TermRangeQuery rq = new TermRangeQuery("sorter", "b", "d", true, true);
 			
 			Query filteredquery = new FilteredQuery(rq, filter);
 			ScoreDoc[] hits = searcher.Search(filteredquery, null, 1000).scoreDocs;
 			Assert.AreEqual(2, hits.Length);
 			QueryUtils.Check(filteredquery, searcher);
 		}
-
-		[Test]		
+		
+		[Test]
 		public virtual void  TestBoolean()
 		{
 			BooleanQuery bq = new BooleanQuery();
-			Query query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.Search.SingleDocTestFilter(0));
+			Query query = new FilteredQuery(new MatchAllDocsQuery(), new SingleDocTestFilter(0));
 			bq.Add(query, BooleanClause.Occur.MUST);
-			query = new FilteredQuery(new MatchAllDocsQuery(), new Lucene.Net.Search.SingleDocTestFilter(1));
+			query = new FilteredQuery(new MatchAllDocsQuery(), new SingleDocTestFilter(1));
 			bq.Add(query, BooleanClause.Occur.MUST);
 			ScoreDoc[] hits = searcher.Search(bq, null, 1000).scoreDocs;
 			Assert.AreEqual(0, hits.Length);
 			QueryUtils.Check(query, searcher);
 		}
+		
+		// Make sure a BooleanQuery, which can score documents
+		// out of order, works correctly inside a FilteredQuery
+		[Test]
+		public virtual void  TestBoolean2()
+		{
+			BooleanQuery bq = new BooleanQuery();
+			Query query = new FilteredQuery(bq, new SingleDocTestFilter(0));
+			bq.Add(new TermQuery(new Term("field", "one")), BooleanClause.Occur.SHOULD);
+			bq.Add(new TermQuery(new Term("field", "two")), BooleanClause.Occur.SHOULD);
+			ScoreDoc[] hits = searcher.Search(query, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
+			QueryUtils.Check(query, searcher);
+		}
 	}
 }
\ No newline at end of file
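
The rewrite above drops the obsolete Bits(IndexReader) override, leaving GetDocIdSet(IndexReader) as the single Filter entry point in the 2.9 API. A minimal standalone filter in the same style (the class name and selection logic are illustrative, not part of this commit; assumes the usings of the file above):

    [Serializable]
    public class OddDocsFilter : Filter
    {
        public override DocIdSet GetDocIdSet(IndexReader reader)
        {
            // Select every odd-numbered document in the index.
            System.Collections.BitArray bits = new System.Collections.BitArray(reader.MaxDoc());
            for (int i = 1; i < reader.MaxDoc(); i += 2)
                bits.Set(i, true);
            return new DocIdBitSet(bits); // adapts BitArray to the DocIdSet contract
        }
    }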

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFilteredSearch.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs Tue Nov  3 18:06:27 2009
@@ -1,13 +1,13 @@
-/**
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -15,6 +15,8 @@
  * limitations under the License.
  */
 
+using System;
+
 using NUnit.Framework;
 
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -25,72 +27,76 @@
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 using OpenBitSet = Lucene.Net.Util.OpenBitSet;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
-    /**
-     *
-     */
+	
+	
+	/// <summary> Tests filtered search with a custom DocIdSet-based Filter. </summary>
     [TestFixture]
-    public class TestFilteredSearch
-    {
-        private const string FIELD = "category";
-
-        [Test]
-        public void TestFilteredSearch_Renamed()
-        {
-            RAMDirectory directory = new RAMDirectory();
-            int[] filterBits = { 1, 36 };
-            Filter filter = new SimpleDocIdSetFilter(filterBits);
-
-            try
-            {
-                IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-                for (int i = 0; i < 60; i++)
-                {//Simple docs
-                    Document doc = new Document();
-                    doc.Add(new Field(FIELD, "" + i, Field.Store.YES, Field.Index.NOT_ANALYZED));
-                    writer.AddDocument(doc);
-                }
-                writer.Close();
-
-                BooleanQuery booleanQuery = new BooleanQuery();
-                booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), BooleanClause.Occur.SHOULD);
-
-
-                IndexSearcher indexSearcher = new IndexSearcher(directory);
-                ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).scoreDocs;
-                Assert.AreEqual(1, hits.Length, "Number of matched documents");
-            }
-            catch (System.IO.IOException e)
-            {
-                Assert.Fail(e.Message);
-            }
-        }
-
-        public class SimpleDocIdSetFilter : Filter
-        {
-            private OpenBitSet bits;
-
-            public SimpleDocIdSetFilter(int[] docs)
-            {
-                bits = new OpenBitSet();
-                for (int i = 0; i < docs.Length; i++)
-                {
-                    bits.Set(docs[i]);
-                }
-            }
-
-            public override DocIdSet GetDocIdSet(IndexReader reader)
-            {
-                return bits;
-            }
-
-            [System.Obsolete()]
-            public override System.Collections.BitArray Bits(IndexReader reader)
-            {
-                return null;
-            }
-        }
-    }
-}
+	public class TestFilteredSearch:LuceneTestCase
+	{
+		
+		public TestFilteredSearch(System.String name):base(name)
+		{
+		}
+		
+		private const System.String FIELD = "category";
+		
+		[Test]
+		public virtual void  TestFilteredSearch_Renamed()
+		{
+			RAMDirectory directory = new RAMDirectory();
+			int[] filterBits = new int[]{1, 36};
+			Filter filter = new SimpleDocIdSetFilter(filterBits);
+			
+			
+			try
+			{
+				IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+				for (int i = 0; i < 60; i++)
+				{
+					//Simple docs
+					Document doc = new Document();
+					doc.Add(new Field(FIELD, System.Convert.ToString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
+					writer.AddDocument(doc);
+				}
+				writer.Close();
+				
+				BooleanQuery booleanQuery = new BooleanQuery();
+				booleanQuery.Add(new TermQuery(new Term(FIELD, "36")), BooleanClause.Occur.SHOULD);
+				
+				
+				IndexSearcher indexSearcher = new IndexSearcher(directory);
+				ScoreDoc[] hits = indexSearcher.Search(booleanQuery, filter, 1000).scoreDocs;
+				Assert.AreEqual(1, hits.Length, "Number of matched documents");
+			}
+			catch (System.IO.IOException e)
+			{
+				Assert.Fail(e.Message);
+			}
+		}
+		
+		
+		[Serializable]
+		public sealed class SimpleDocIdSetFilter:Filter
+		{
+			private OpenBitSet bits;
+			
+			public SimpleDocIdSetFilter(int[] docs)
+			{
+				bits = new OpenBitSet();
+				for (int i = 0; i < docs.Length; i++)
+				{
+					bits.Set(docs[i]);
+				}
+			}
+			
+			public override DocIdSet GetDocIdSet(IndexReader reader)
+			{
+				return bits;
+			}
+		}
+	}
+}
\ No newline at end of file
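
SimpleDocIdSetFilter can hand back its OpenBitSet directly because in 2.9 OpenBitSet itself extends DocIdSet, so no wrapper is needed. A sketch of consuming such a set, assuming the ported 2.9 iterator surface (NextDoc()/NO_MORE_DOCS):

    OpenBitSet bits = new OpenBitSet();
    bits.Set(1);
    bits.Set(36);
    DocIdSetIterator it = bits.Iterator();
    int doc;
    while ((doc = it.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
        System.Console.WriteLine(doc); // prints 1, then 36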

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestFuzzyQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestFuzzyQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,12 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -33,9 +33,10 @@
 	/// <summary> Tests {@link FuzzyQuery}.
 	/// 
 	/// </summary>
-	[TestFixture]
-	public class TestFuzzyQuery : LuceneTestCase
+    [TestFixture]
+	public class TestFuzzyQuery:LuceneTestCase
 	{
+		
 		[Test]
 		public virtual void  TestFuzziness()
 		{
@@ -244,7 +245,7 @@
 				query = new FuzzyQuery(new Term("field", "student"), 1.1f);
 				Assert.Fail("Expected IllegalArgumentException");
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expecting exception
 			}
@@ -253,7 +254,7 @@
 				query = new FuzzyQuery(new Term("field", "student"), - 0.1f);
 				Assert.Fail("Expected IllegalArgumentException");
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// expecting exception
 			}
@@ -262,9 +263,42 @@
 			directory.Close();
 		}
 		
+		[Test]
+		public virtual void  TestTokenLengthOpt()
+		{
+			RAMDirectory directory = new RAMDirectory();
+			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			AddDoc("12345678911", writer);
+			AddDoc("segment", writer);
+			writer.Optimize();
+			writer.Close();
+			IndexSearcher searcher = new IndexSearcher(directory);
+			
+			Query query;
+			// query term under 10 chars, so the length optimization short-circuits the fuzzy match
+			query = new FuzzyQuery(new Term("field", "1234569"), 0.9f);
+			ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length);
+			
+			// exactly 10 chars, so the optimization no longer applies; still no match
+			query = new FuzzyQuery(new Term("field", "1234567891"), 0.9f);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length);
+			
+			// over 10 chars, so no optimization
+			query = new FuzzyQuery(new Term("field", "12345678911"), 0.9f);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(1, hits.Length);
+			
+			// over 10 chars, no match
+			query = new FuzzyQuery(new Term("field", "sdfsdfsdfsdf"), 0.9f);
+			hits = searcher.Search(query, null, 1000).scoreDocs;
+			Assert.AreEqual(0, hits.Length);
+		}
+		
 		private void  AddDoc(System.String text, IndexWriter writer)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("field", text, Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}
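
The new TestTokenLengthOpt pins down FuzzyQuery's length shortcut. Candidate terms score 1 - editDistance / min(|queryTerm|, |indexedTerm|), so at minimumSimilarity 0.9 even a single edit requires min(|queryTerm|, |indexedTerm|) >= 1 / (1 - 0.9) = 10; query terms shorter than 10 characters can therefore only match themselves, and the fuzzy comparison can be skipped entirely. For example:

    // "1234569" is 7 chars: one edit yields at best 1 - 1/7 = 0.857 < 0.9,
    // so with minimumSimilarity 0.9 this query can only match the exact term.
    Query q = new FuzzyQuery(new Term("field", "1234569"), 0.9f);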

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMatchAllDocsQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMatchAllDocsQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,16 @@
 
 using NUnit.Framework;
 
+using Analyzer = Lucene.Net.Analysis.Analyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Version = Lucene.Net.Util.Version;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -33,43 +37,97 @@
 	/// <summary> Tests MatchAllDocsQuery.
 	/// 
 	/// </summary>
-	[TestFixture]
-	public class TestMatchAllDocsQuery : LuceneTestCase
+    [TestFixture]
+	public class TestMatchAllDocsQuery:LuceneTestCase
 	{
+		public TestMatchAllDocsQuery()
+		{
+			InitBlock();
+		}
+		private void  InitBlock()
+		{
+			analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
+		}
+		private Analyzer analyzer;
+		
 		[Test]
 		public virtual void  TestQuery()
 		{
+			
 			RAMDirectory dir = new RAMDirectory();
-			IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-			AddDoc("one", iw);
-			AddDoc("two", iw);
-			AddDoc("three four", iw);
+			IndexWriter iw = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+			iw.SetMaxBufferedDocs(2); // force multi-segment
+			AddDoc("one", iw, 1f);
+			AddDoc("two", iw, 20f);
+			AddDoc("three four", iw, 300f);
 			iw.Close();
 			
-			IndexSearcher is_Renamed = new IndexSearcher(dir);
-			ScoreDoc[] hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
+			IndexReader ir = IndexReader.Open(dir);
+			IndexSearcher is_Renamed = new IndexSearcher(ir);
+			ScoreDoc[] hits;
+			
+			// assert with norms scoring turned off
+			
+			hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
 			Assert.AreEqual(3, hits.Length);
+			Assert.AreEqual(ir.Document(hits[0].doc).Get("key"), "one");
+			Assert.AreEqual(ir.Document(hits[1].doc).Get("key"), "two");
+			Assert.AreEqual(ir.Document(hits[2].doc).Get("key"), "three four");
+			
+			// assert with norms scoring turned on
+			
+			MatchAllDocsQuery normsQuery = new MatchAllDocsQuery("key");
+			hits = is_Renamed.Search(normsQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(3, hits.Length);
+			
+			Assert.AreEqual(ir.Document(hits[0].doc).Get("key"), "three four");
+			Assert.AreEqual(ir.Document(hits[1].doc).Get("key"), "two");
+			Assert.AreEqual(ir.Document(hits[2].doc).Get("key"), "one");
+			
+			// change norm & retest
+			ir.SetNorm(0, "key", 400f);
+			normsQuery = new MatchAllDocsQuery("key");
+			hits = is_Renamed.Search(normsQuery, null, 1000).scoreDocs;
+			Assert.AreEqual(3, hits.Length);
+			
+			Assert.AreEqual(ir.Document(hits[0].doc).Get("key"), "one");
+			Assert.AreEqual(ir.Document(hits[1].doc).Get("key"), "three four");
+			Assert.AreEqual(ir.Document(hits[2].doc).Get("key"), "two");
 			
 			// some artificial queries to trigger the use of skipTo():
 			
 			BooleanQuery bq = new BooleanQuery();
 			bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
 			bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
-            hits = is_Renamed.Search(bq, null, 1000).scoreDocs;
+			hits = is_Renamed.Search(bq, null, 1000).scoreDocs;
 			Assert.AreEqual(3, hits.Length);
 			
 			bq = new BooleanQuery();
 			bq.Add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
 			bq.Add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST);
-            hits = is_Renamed.Search(bq, null, 1000).scoreDocs;
+			hits = is_Renamed.Search(bq, null, 1000).scoreDocs;
 			Assert.AreEqual(1, hits.Length);
 			
 			// delete a document:
 			is_Renamed.GetIndexReader().DeleteDocument(0);
-            hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
+			hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
+			Assert.AreEqual(2, hits.Length);
+			
+			// test parsable toString()
+			QueryParser qp = new QueryParser("key", analyzer);
+			hits = is_Renamed.Search(qp.Parse(new MatchAllDocsQuery().ToString()), null, 1000).scoreDocs;
+			Assert.AreEqual(2, hits.Length);
+			
+			// test parsable toString() with non default boost
+			Query maq = new MatchAllDocsQuery();
+			maq.SetBoost(2.3f);
+			Query pq = qp.Parse(maq.ToString());
+			hits = is_Renamed.Search(pq, null, 1000).scoreDocs;
 			Assert.AreEqual(2, hits.Length);
 			
 			is_Renamed.Close();
+			ir.Close();
+			dir.Close();
 		}
 		
 		[Test]
@@ -82,10 +140,12 @@
 			Assert.IsFalse(q1.Equals(q2));
 		}
 		
-		private void  AddDoc(System.String text, IndexWriter iw)
+		private void  AddDoc(System.String text, IndexWriter iw, float boost)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
-			doc.Add(new Field("key", text, Field.Store.YES, Field.Index.ANALYZED));
+			Document doc = new Document();
+			Field f = new Field("key", text, Field.Store.YES, Field.Index.ANALYZED);
+			f.SetBoost(boost);
+			doc.Add(f);
 			iw.AddDocument(doc);
 		}
 	}
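
The reworked test covers the 2.9 form MatchAllDocsQuery(System.String normsField): the no-argument query scores every document 1.0, while the field form scores each hit by that field's stored norm, so index-time boosts (and later SetNorm calls) reorder otherwise identical matches. Condensed from the test above (boost value illustrative; iw is an open IndexWriter as in the test):

    Document doc = new Document();
    Field f = new Field("key", "three four", Field.Store.YES, Field.Index.ANALYZED);
    f.SetBoost(300f);            // recorded in the "key" norm at index time
    doc.Add(f);
    iw.AddDocument(doc);

    Query byNorms = new MatchAllDocsQuery("key"); // each hit scored by its "key" norm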

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestMultiPhraseQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestMultiPhraseQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,8 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,22 +28,23 @@
 using Term = Lucene.Net.Index.Term;
 using TermEnum = Lucene.Net.Index.TermEnum;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
-using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
-
+	
 	/// <summary> This class tests the MultiPhraseQuery class.
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Id: TestMultiPhraseQuery.java 583534 2007-10-10 16:46:35Z mikemccand $
+	/// <version>  $Id: TestMultiPhraseQuery.java 794078 2009-07-14 21:39:22Z markrmiller $
 	/// </version>
-	[TestFixture]
-	public class TestMultiPhraseQuery : LuceneTestCase
+    [TestFixture]
+	public class TestMultiPhraseQuery:LuceneTestCase
 	{
+		public TestMultiPhraseQuery(System.String name):base(name)
+		{
+		}
 		
 		[Test]
 		public virtual void  TestPhrasePrefix()
@@ -81,7 +84,7 @@
 				}
 			}
 			while (te.Next());
-
+			
 			query1.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
 			Assert.AreEqual("body:\"blueberry (piccadilly pie pizza)\"", query1.ToString());
 			query2.Add((Term[]) termsWithPrefix.ToArray(typeof(Term)));
@@ -90,7 +93,7 @@
 			ScoreDoc[] result;
 			result = searcher.Search(query1, null, 1000).scoreDocs;
 			Assert.AreEqual(2, result.Length);
-            result = searcher.Search(query2, null, 1000).scoreDocs;
+			result = searcher.Search(query2, null, 1000).scoreDocs;
 			Assert.AreEqual(0, result.Length);
 			
 			// search for "blue* pizza":
@@ -125,7 +128,7 @@
 				query4.Add(new Term("field2", "foobar"));
 				Assert.Fail();
 			}
-			catch (System.ArgumentException)
+			catch (System.ArgumentException e)
 			{
 				// okay, all terms must belong to the same field
 			}
@@ -136,7 +139,7 @@
 		
 		private void  Add(System.String s, IndexWriter writer)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
 			writer.AddDocument(doc);
 		}
@@ -150,7 +153,7 @@
 			// The contained PhraseMultiQuery must contain exactly one term array.
 			
 			RAMDirectory indexStore = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			Add("blueberry pie", writer);
 			Add("blueberry chewing gum", writer);
 			Add("blue raspberry pie", writer);
@@ -177,7 +180,7 @@
 		public virtual void  TestPhrasePrefixWithBooleanQuery()
 		{
 			RAMDirectory indexStore = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new System.String[] { }), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new System.Collections.Hashtable(0)), true, IndexWriter.MaxFieldLength.LIMITED);
 			Add("This is a test", "object", writer);
 			Add("a note", "note", writer);
 			writer.Close();
@@ -199,9 +202,40 @@
 			searcher.Close();
 		}
 		
+		[Test]
+		public virtual void  TestHashCodeAndEquals()
+		{
+			MultiPhraseQuery query1 = new MultiPhraseQuery();
+			MultiPhraseQuery query2 = new MultiPhraseQuery();
+			
+			Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+			Assert.AreEqual(query1, query2);
+			
+			Term term1 = new Term("someField", "someText");
+			
+			query1.Add(term1);
+			query2.Add(term1);
+			
+			Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+			Assert.AreEqual(query1, query2);
+			
+			Term term2 = new Term("someField", "someMoreText");
+			
+			query1.Add(term2);
+			
+			Assert.IsFalse(query1.GetHashCode() == query2.GetHashCode());
+			Assert.IsFalse(query1.Equals(query2));
+			
+			query2.Add(term2);
+			
+			Assert.AreEqual(query1.GetHashCode(), query2.GetHashCode());
+			Assert.AreEqual(query1, query2);
+		}
+		
+		
 		private void  Add(System.String s, System.String type, IndexWriter writer)
 		{
-			Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
+			Document doc = new Document();
 			doc.Add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
 			doc.Add(new Field("type", type, Field.Store.YES, Field.Index.NOT_ANALYZED));
 			writer.AddDocument(doc);
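
MultiPhraseQuery generalizes PhraseQuery by allowing several alternative terms at a single position, which is how the prefix expansion above produces body:"blueberry (piccadilly pie pizza)". A minimal sketch (terms illustrative):

    MultiPhraseQuery mpq = new MultiPhraseQuery();
    mpq.Add(new Term("body", "blueberry"));   // position 0: one term
    mpq.Add(new Term[] { new Term("body", "pie"), new Term("body", "pizza") });
                                              // position 1: either alternative
    // Matches "blueberry pie" and "blueberry pizza", but not "blueberry tart".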


