lucenenet-commits mailing list archives

From: aro...@apache.org
Subject: svn commit: r832486 [23/29] - in /incubator/lucene.net/trunk/C#/src: ./ Demo/DeleteFiles/ Demo/DemoLib/ Demo/IndexFiles/ Demo/IndexHtml/ Demo/SearchFiles/ Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Document/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene...
Date: Tue, 03 Nov 2009 18:06:38 GMT
Added: incubator/lucene.net/trunk/C#/src/Test/Search/TestScoreCachingWrappingScorer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestScoreCachingWrappingScorer.cs?rev=832486&view=auto
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestScoreCachingWrappingScorer.cs (added)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestScoreCachingWrappingScorer.cs Tue Nov  3 18:06:27 2009
@@ -0,0 +1,158 @@
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+using NUnit.Framework;
+
+using IndexReader = Lucene.Net.Index.IndexReader;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+namespace Lucene.Net.Search
+{
+	
+    [TestFixture]
+	public class TestScoreCachingWrappingScorer:LuceneTestCase
+	{
+		
+		private sealed class SimpleScorer:Scorer
+		{
+			private int idx = 0;
+			private int doc = - 1;
+			
+			public SimpleScorer():base(null)
+			{
+			}
+			
+			public override Explanation Explain(int doc)
+			{
+				return null;
+			}
+			
+			public override float Score()
+			{
+				// advance idx on purpose, so that consecutive calls to score will get
+				// different results. This is to emulate computation of a score. If
+				// ScoreCachingWrappingScorer is used, this should not be called more than
+				// once per document.
+				return idx == Lucene.Net.Search.TestScoreCachingWrappingScorer.scores.Length?System.Single.NaN:Lucene.Net.Search.TestScoreCachingWrappingScorer.scores[idx++];
+			}
+			
+			/// <deprecated> use {@link #DocID()} instead. 
+			/// </deprecated>
+			public override int Doc()
+			{
+				return doc;
+			}
+			
+			public override int DocID()
+			{
+				return doc;
+			}
+			
+			/// <deprecated> use {@link #NextDoc()} instead. 
+			/// </deprecated>
+			public override bool Next()
+			{
+				return NextDoc() != NO_MORE_DOCS;
+			}
+			
+			public override int NextDoc()
+			{
+				return ++doc < Lucene.Net.Search.TestScoreCachingWrappingScorer.scores.Length?doc:NO_MORE_DOCS;
+			}
+			
+			/// <deprecated> use {@link #Advance(int)} instead. 
+			/// </deprecated>
+			public override bool SkipTo(int target)
+			{
+				return Advance(target) != NO_MORE_DOCS;
+			}
+			
+			public override int Advance(int target)
+			{
+				doc = target;
+				return doc < Lucene.Net.Search.TestScoreCachingWrappingScorer.scores.Length?doc:NO_MORE_DOCS;
+			}
+		}
+		
+		private sealed class ScoreCachingCollector:Collector
+		{
+			
+			private int idx = 0;
+			private Scorer scorer;
+			internal float[] mscores;
+			
+			public ScoreCachingCollector(int numToCollect)
+			{
+				mscores = new float[numToCollect];
+			}
+			
+			public override void  Collect(int doc)
+			{
+				// just a sanity check to avoid IOOB.
+				if (idx == mscores.Length)
+				{
+					return ;
+				}
+				
+				// just call score() a couple of times and record the score.
+				mscores[idx] = scorer.Score();
+				mscores[idx] = scorer.Score();
+				mscores[idx] = scorer.Score();
+				++idx;
+			}
+			
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+			}
+			
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = new ScoreCachingWrappingScorer(scorer);
+			}
+			
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
+			}
+		}
+		
+		private static readonly float[] scores = new float[]{0.7767749f, 1.7839992f, 8.9925785f, 7.9608946f, 0.07948637f, 2.6356435f, 7.4950366f, 7.1490803f, 8.108544f, 4.961808f, 2.2423935f, 7.285586f, 4.6699767f};
+		
+        [Test]
+		public virtual void  TestGetScores()
+		{
+			
+			Scorer s = new SimpleScorer();
+			ScoreCachingCollector scc = new ScoreCachingCollector(scores.Length);
+			scc.SetScorer(s);
+			
+			// We need to iterate on the scorer so that its doc() advances.
+			int doc;
+			while ((doc = s.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
+			{
+				scc.Collect(doc);
+			}
+			
+			for (int i = 0; i < scores.Length; i++)
+			{
+				Assert.AreEqual(scores[i], scc.mscores[i], 0f);
+			}
+		}
+	}
+}
\ No newline at end of file
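
The new test above drives a SimpleScorer whose Score() deliberately returns a different value on every call, then checks that a collector which wraps its scorer in ScoreCachingWrappingScorer records identical values no matter how many times Score() is invoked per document. A minimal sketch of that usage pattern, assuming the same Lucene.Net 2.9-era Collector API used in the test (the class and variable names here are illustrative only):

    using Lucene.Net.Index;
    using Lucene.Net.Search;

    public class CachedScoreCollector : Collector
    {
        private Scorer scorer;

        public override void SetScorer(Scorer scorer)
        {
            // Wrap once; the wrapper remembers the score of the current document.
            this.scorer = new ScoreCachingWrappingScorer(scorer);
        }

        public override void Collect(int doc)
        {
            // Both calls return the same value; the wrapped scorer computes it only once per doc.
            float first = scorer.Score();
            float second = scorer.Score();
        }

        public override void SetNextReader(IndexReader reader, int docBase)
        {
        }

        public override bool AcceptsDocsOutOfOrder()
        {
            return true;
        }
    }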

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestScorerPerf.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestScorerPerf.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,9 +27,8 @@
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
-using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 using DocIdBitSet = Lucene.Net.Util.DocIdBitSet;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -36,10 +36,38 @@
 	/// <summary> </summary>
 	/// <version>  $Id$
 	/// </version>
-	[TestFixture]
-	public class TestScorerPerf : LuceneTestCase
+    [TestFixture]
+	public class TestScorerPerf:LuceneTestCase
 	{
-		internal System.Random r = new System.Random((System.Int32) 0);
+		[Serializable]
+		private class AnonymousClassFilter:Filter
+		{
+			public AnonymousClassFilter(System.Collections.BitArray rnd, TestScorerPerf enclosingInstance)
+			{
+				InitBlock(rnd, enclosingInstance);
+			}
+			private void  InitBlock(System.Collections.BitArray rnd, TestScorerPerf enclosingInstance)
+			{
+				this.rnd = rnd;
+				this.enclosingInstance = enclosingInstance;
+			}
+			private System.Collections.BitArray rnd;
+			private TestScorerPerf enclosingInstance;
+			public TestScorerPerf Enclosing_Instance
+			{
+				get
+				{
+					return enclosingInstance;
+				}
+				
+			}
+			public override DocIdSet GetDocIdSet(IndexReader reader)
+			{
+				return new DocIdBitSet(rnd);
+			}
+			
+		}
+		internal System.Random r;
 		internal bool validate = true; // set to false when doing performance testing
 		
 		internal System.Collections.BitArray[] sets;
@@ -52,6 +80,7 @@
 			// This could possibly fail if Lucene starts checking for docid ranges...
 			RAMDirectory rd = new RAMDirectory();
 			IndexWriter iw = new IndexWriter(rd, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			iw.AddDocument(new Document());
 			iw.Close();
 			s = new IndexSearcher(rd);
 		}
@@ -59,14 +88,15 @@
 		public virtual void  CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
 		{
 			int[] freq = new int[nTerms];
+			terms = new Term[nTerms];
 			for (int i = 0; i < nTerms; i++)
 			{
 				int f = (nTerms + 1) - i; // make first terms less frequent
 				freq[i] = (int) System.Math.Ceiling(System.Math.Pow(f, power));
 				terms[i] = new Term("f", System.Convert.ToString((char) ('A' + i)));
 			}
-
-            IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			for (int i = 0; i < nDocs; i++)
 			{
 				Document d = new Document();
@@ -87,7 +117,7 @@
 		
 		public virtual System.Collections.BitArray RandBitSet(int sz, int numBitsToSet)
 		{
-			System.Collections.BitArray set_Renamed = new System.Collections.BitArray((sz % 64 == 0 ? sz / 64 : sz / 64 + 1) * 64);
+			System.Collections.BitArray set_Renamed = new System.Collections.BitArray((sz % 64 == 0?sz / 64:sz / 64 + 1) * 64);
 			for (int i = 0; i < numBitsToSet; i++)
 			{
 				set_Renamed.Set(r.Next(sz), true);
@@ -105,15 +135,20 @@
 			return sets;
 		}
 		
-		public class CountingHitCollector : HitCollector
+		public class CountingHitCollector:Collector
 		{
 			internal int count = 0;
 			internal int sum = 0;
+			protected internal int docBase = 0;
 			
-			public override void  Collect(int doc, float score)
+			public override void  SetScorer(Scorer scorer)
+			{
+			}
+			
+			public override void  Collect(int doc)
 			{
 				count++;
-				sum += doc; // use it to avoid any possibility of being optimized away
+				sum += docBase + doc; // use it to avoid any possibility of being optimized away
 			}
 			
 			public virtual int GetCount()
@@ -124,10 +159,19 @@
 			{
 				return sum;
 			}
+			
+			public override void  SetNextReader(IndexReader reader, int base_Renamed)
+			{
+				docBase = base_Renamed;
+			}
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
+			}
 		}
 		
 		
-		public class MatchingHitCollector : CountingHitCollector
+		public class MatchingHitCollector:CountingHitCollector
 		{
 			internal System.Collections.BitArray answer;
 			internal int pos = - 1;
@@ -136,39 +180,23 @@
 				this.answer = answer;
 			}
 			
-			public override void  Collect(int doc, float score)
+			public virtual void  Collect(int doc, float score)
 			{
-				pos = SupportClass.Number.NextSetBit(answer, pos + 1);
-				if (pos != doc)
+				
+				pos = SupportClass.BitSetSupport.NextSetBit(answer, pos + 1);
+				if (pos != doc + docBase)
 				{
-					throw new System.SystemException("Expected doc " + pos + " but got " + doc);
+					throw new System.SystemException("Expected doc " + pos + " but got " + doc + docBase);
 				}
-				base.Collect(doc, score);
+				base.Collect(doc);
 			}
 		}
 		
-		public class AnonymousClassFilter : Filter
-        {
-            private System.Collections.BitArray rnd;
-            public AnonymousClassFilter(System.Collections.BitArray rnd)
-            {
-                this.rnd = rnd;
-            }
-            override public DocIdSet GetDocIdSet(IndexReader reader)
-            {
-                return new DocIdBitSet(rnd);
-            }
-            [System.Obsolete()]
-            override public System.Collections.BitArray Bits(IndexReader reader)
-            {
-                return null;
-            }
-        }
-
+		
 		internal virtual System.Collections.BitArray AddClause(BooleanQuery bq, System.Collections.BitArray result)
 		{
 			System.Collections.BitArray rnd = sets[r.Next(sets.Length)];
-			Query q = new ConstantScoreQuery(new AnonymousClassFilter(rnd));
+			Query q = new ConstantScoreQuery(new AnonymousClassFilter(rnd, this));
 			bq.Add(q, BooleanClause.Occur.MUST);
 			if (validate)
 			{
@@ -200,8 +228,9 @@
 				CountingHitCollector hc = validate?new MatchingHitCollector(result):new CountingHitCollector();
 				s.Search(bq, hc);
 				ret += hc.GetSum();
+				
 				if (validate)
-					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.GetCount());
+					Assert.AreEqual(SupportClass.BitSetSupport.Cardinality(result), hc.GetCount());
 				// System.out.println(hc.getCount());
 			}
 			
@@ -237,7 +266,7 @@
 				nMatches += hc.GetCount();
 				ret += hc.GetSum();
 				if (validate)
-					Assert.AreEqual(SupportClass.Number.Cardinality(result), hc.GetCount());
+					Assert.AreEqual(SupportClass.BitSetSupport.Cardinality(result), hc.GetCount());
 				// System.out.println(hc.getCount());
 			}
 			System.Console.Out.WriteLine("Average number of matches=" + (nMatches / iter));
@@ -254,38 +283,16 @@
 			{
 				int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
 				BooleanQuery bq = new BooleanQuery();
-				System.Collections.BitArray termflag = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
+				System.Collections.BitArray termflag = new System.Collections.BitArray((termsInIndex % 64 == 0?termsInIndex / 64:termsInIndex / 64 + 1) * 64);
 				for (int j = 0; j < nClauses; j++)
 				{
 					int tnum;
 					// don't pick same clause twice
 					tnum = r.Next(termsInIndex);
 					if (termflag.Get(tnum))
-					{
-						int nextClearBit = -1;
-						for (int k = tnum + 1; k < termflag.Count; k++)
-						{
-							if (!termflag.Get(k))
-							{
-								nextClearBit = k;
-								break;
-							}
-						}
-						tnum = nextClearBit;
-					}
+						tnum = SupportClass.BitSetSupport.NextClearBit(termflag, tnum);
 					if (tnum < 0 || tnum >= termsInIndex)
-					{
-						int nextClearBit = -1;
-						for (int k = 0; k < termflag.Count; k++)
-						{
-							if (!termflag.Get(k))
-							{
-								nextClearBit = k;
-								break;
-							}
-						}
-						tnum = nextClearBit;
-					}
+						tnum = SupportClass.BitSetSupport.NextClearBit(termflag, 0);
 					termflag.Set(tnum, true);
 					Query tq = new TermQuery(terms[tnum]);
 					bq.Add(tq, BooleanClause.Occur.MUST);
@@ -315,38 +322,16 @@
 					
 					int nClauses = r.Next(maxClauses - 1) + 2; // min 2 clauses
 					BooleanQuery bq = new BooleanQuery();
-					System.Collections.BitArray termflag = new System.Collections.BitArray((termsInIndex % 64 == 0 ? termsInIndex / 64 : termsInIndex / 64 + 1) * 64);
+					System.Collections.BitArray termflag = new System.Collections.BitArray((termsInIndex % 64 == 0?termsInIndex / 64:termsInIndex / 64 + 1) * 64);
 					for (int j = 0; j < nClauses; j++)
 					{
 						int tnum;
 						// don't pick same clause twice
 						tnum = r.Next(termsInIndex);
 						if (termflag.Get(tnum))
-						{
-							int nextClearBit = -1;
-							for (int k = tnum + 1; k < termflag.Count; k++)
-							{
-								if (!termflag.Get(k))
-								{
-									nextClearBit = k;
-									break;
-								}
-							}
-							tnum = nextClearBit;
-						}
+							tnum = SupportClass.BitSetSupport.NextClearBit(termflag, tnum);
 						if (tnum < 0 || tnum >= 25)
-						{
-							int nextClearBit = -1;
-							for (int k = 0; k < termflag.Count; k++)
-							{
-								if (!termflag.Get(k))
-								{
-									nextClearBit = k;
-									break;
-								}
-							}
-							tnum = nextClearBit;
-						}
+							tnum = SupportClass.BitSetSupport.NextClearBit(termflag, 0);
 						termflag.Set(tnum, true);
 						Query tq = new TermQuery(terms[tnum]);
 						bq.Add(tq, BooleanClause.Occur.MUST);
@@ -377,7 +362,7 @@
 				for (int j = 0; j < nClauses; j++)
 				{
 					int tnum = r.Next(termsInIndex);
-					q.Add(new Term("f", System.Convert.ToString((char)(tnum + 'A'))), j);
+					q.Add(new Term("f", System.Convert.ToString((char) (tnum + 'A'))), j);
 				}
 				q.SetSlop(termsInIndex); // this could be random too
 				
@@ -389,10 +374,12 @@
 			return ret;
 		}
 		
+		
 		[Test]
 		public virtual void  TestConjunctions()
 		{
 			// test many small sets... the bugs will be found on boundary conditions
+			r = NewRandom();
 			CreateDummySearcher();
 			validate = true;
 			sets = RandBitSets(1000, 10);
@@ -404,30 +391,33 @@
 		/// <summary> 
 		/// int bigIter=10;
 		/// public void testConjunctionPerf() throws Exception {
-		/// CreateDummySearcher();
+		/// r = newRandom();
+		/// createDummySearcher();
 		/// validate=false;
-		/// sets=RandBitSets(32,1000000);
+		/// sets=randBitSets(32,1000000);
 		/// for (int i=0; i<bigIter; i++) {
 		/// long start = System.currentTimeMillis();
-		/// DoConjunctions(500,6);
+		/// doConjunctions(500,6);
 		/// long end = System.currentTimeMillis();
 		/// System.out.println("milliseconds="+(end-start));
 		/// }
 		/// s.close();
 		/// }
 		/// public void testNestedConjunctionPerf() throws Exception {
-		/// CreateDummySearcher();
+		/// r = newRandom();
+		/// createDummySearcher();
 		/// validate=false;
-		/// sets=RandBitSets(32,1000000);
+		/// sets=randBitSets(32,1000000);
 		/// for (int i=0; i<bigIter; i++) {
 		/// long start = System.currentTimeMillis();
-		/// DoNestedConjunctions(500,3,3);
+		/// doNestedConjunctions(500,3,3);
 		/// long end = System.currentTimeMillis();
 		/// System.out.println("milliseconds="+(end-start));
 		/// }
 		/// s.close();
 		/// }
 		/// public void testConjunctionTerms() throws Exception {
+		/// r = newRandom();
 		/// validate=false;
 		/// RAMDirectory dir = new RAMDirectory();
 		/// System.out.println("Creating index");
@@ -436,13 +426,14 @@
 		/// System.out.println("Starting performance test");
 		/// for (int i=0; i<bigIter; i++) {
 		/// long start = System.currentTimeMillis();
-		/// DoTermConjunctions(s,25,5,10000);
+		/// doTermConjunctions(s,25,5,1000);
 		/// long end = System.currentTimeMillis();
 		/// System.out.println("milliseconds="+(end-start));
 		/// }
 		/// s.close();
 		/// }
 		/// public void testNestedConjunctionTerms() throws Exception {
+		/// r = newRandom();
 		/// validate=false;    
 		/// RAMDirectory dir = new RAMDirectory();
 		/// System.out.println("Creating index");
@@ -458,15 +449,16 @@
 		/// s.close();
 		/// }
 		/// public void testSloppyPhrasePerf() throws Exception {
+		/// r = newRandom();
 		/// validate=false;    
 		/// RAMDirectory dir = new RAMDirectory();
 		/// System.out.println("Creating index");
-		/// CreateRandomTerms(100000,25,2,dir);
+		/// createRandomTerms(100000,25,2,dir);
 		/// s = new IndexSearcher(dir);
 		/// System.out.println("Starting performance test");
 		/// for (int i=0; i<bigIter; i++) {
 		/// long start = System.currentTimeMillis();
-		/// DoSloppyPhrase(s,25,2,1000);
+		/// doSloppyPhrase(s,25,2,1000);
 		/// long end = System.currentTimeMillis();
 		/// System.out.println("milliseconds="+(end-start));
 		/// }
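
Several of the hand-rolled "find the next unset bit" loops in the diff above are replaced by SupportClass.BitSetSupport.NextClearBit. Roughly, the helper is expected to behave like java.util.BitSet.nextClearBit; the following is a sketch of that intended semantics for a System.Collections.BitArray, not the actual SupportClass implementation, and the out-of-range return convention is an assumption:

    private static int NextClearBitSketch(System.Collections.BitArray bits, int fromIndex)
    {
        // Return the index of the first bit that is false at or after fromIndex.
        for (int i = fromIndex; i < bits.Count; i++)
        {
            if (!bits.Get(i))
            {
                return i;
            }
        }
        // No clear bit left inside the array; the callers above re-check the returned
        // index against the valid term range before using it.
        return bits.Count;
    }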

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSearchHitsWithDeletions.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSearchHitsWithDeletions.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSearchHitsWithDeletions.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSearchHitsWithDeletions.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,6 +19,7 @@
 
 using NUnit.Framework;
 
+using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
 using IndexReader = Lucene.Net.Index.IndexReader;
@@ -26,7 +27,7 @@
 using Term = Lucene.Net.Index.Term;
 using Directory = Lucene.Net.Store.Directory;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
@@ -35,9 +36,10 @@
 	/// 
 	/// See {@link http://issues.apache.org/jira/browse/LUCENE-1096}.
 	/// </summary>
-    [System.Obsolete("Hits will be removed in Lucene 3.0")]
-	[TestFixture]
-	public class TestSearchHitsWithDeletions
+	/// <deprecated> Hits will be removed in Lucene 3.0
+	/// </deprecated>
+    [TestFixture]
+	public class TestSearchHitsWithDeletions:LuceneTestCase
 	{
 		
 		private static bool VERBOSE = false;
@@ -46,9 +48,10 @@
 		
 		private static Directory directory;
 		
-		[SetUp]
-		public virtual void  SetUp()
+		[Test]
+		public override void  SetUp()
 		{
+			base.SetUp();
 			// Create an index writer.
 			directory = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSetNorm.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSetNorm.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,12 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Lucene.Net.Documents;
 using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -34,14 +34,14 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Revision: 583534 $
+	/// <version>  $Revision: 787772 $
 	/// </version>
-	[TestFixture]
-	public class TestSetNorm : LuceneTestCase
+    [TestFixture]
+	public class TestSetNorm:LuceneTestCase
 	{
-		private class AnonymousClassHitCollector : HitCollector
+		private class AnonymousClassCollector:Collector
 		{
-			public AnonymousClassHitCollector(float[] scores, TestSetNorm enclosingInstance)
+			public AnonymousClassCollector(float[] scores, TestSetNorm enclosingInstance)
 			{
 				InitBlock(scores, enclosingInstance);
 			}
@@ -50,7 +50,6 @@
 				this.scores = scores;
 				this.enclosingInstance = enclosingInstance;
 			}
-
 			private float[] scores;
 			private TestSetNorm enclosingInstance;
 			public TestSetNorm Enclosing_Instance
@@ -61,21 +60,38 @@
 				}
 				
 			}
-			public override void  Collect(int doc, float score)
+			private int base_Renamed = 0;
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
+			{
+				scores[doc + base_Renamed] = scorer.Score();
+			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				base_Renamed = docBase;
+			}
+			public override bool AcceptsDocsOutOfOrder()
 			{
-				scores[doc] = score;
+				return true;
 			}
 		}
-
+		public TestSetNorm(System.String name):base(name)
+		{
+		}
+		
 		[Test]
-		public virtual void  TestSetNorm_Renamed_Method()
+		public virtual void  TestSetNorm_Renamed()
 		{
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			
 			// add the same document four times
 			Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
-			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+			Document d1 = new Document();
 			d1.Add(f1);
 			writer.AddDocument(d1);
 			writer.AddDocument(d1);
@@ -94,7 +110,7 @@
 			// check that searches are ordered by this boost
 			float[] scores = new float[4];
 			
-			new IndexSearcher(store).Search(new TermQuery(new Term("field", "word")), new AnonymousClassHitCollector(scores, this));
+			new IndexSearcher(store).Search(new TermQuery(new Term("field", "word")), new AnonymousClassCollector(scores, this));
 			
 			float lastScore = 0.0f;
 			

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimilarity.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimilarity.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,13 @@
 
 using NUnit.Framework;
 
+using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
+using IndexReader = Lucene.Net.Index.IndexReader;
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using SimpleAnalyzer = Lucene.Net.Analysis.SimpleAnalyzer;
 using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
@@ -34,14 +35,14 @@
 	/// 
 	/// 
 	/// </summary>
-	/// <version>  $Revision: 583534 $
+	/// <version>  $Revision: 787772 $
 	/// </version>
-	[TestFixture]
-	public class TestSimilarity : LuceneTestCase
+    [TestFixture]
+	public class TestSimilarity:LuceneTestCase
 	{
-		private class AnonymousClassHitCollector : HitCollector
+		private class AnonymousClassCollector:Collector
 		{
-			public AnonymousClassHitCollector(TestSimilarity enclosingInstance)
+			public AnonymousClassCollector(TestSimilarity enclosingInstance)
 			{
 				InitBlock(enclosingInstance);
 			}
@@ -58,15 +59,26 @@
 				}
 				
 			}
-			public override void  Collect(int doc, float score)
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
+			{
+				Assert.IsTrue(scorer.Score() == 1.0f);
+			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+			}
+			public override bool AcceptsDocsOutOfOrder()
 			{
-				Assert.IsTrue(score == 1.0f);
+				return true;
 			}
 		}
-
-		private class AnonymousClassHitCollector1 : HitCollector
+		private class AnonymousClassCollector1:Collector
 		{
-			public AnonymousClassHitCollector1(TestSimilarity enclosingInstance)
+			public AnonymousClassCollector1(TestSimilarity enclosingInstance)
 			{
 				InitBlock(enclosingInstance);
 			}
@@ -83,16 +95,29 @@
 				}
 				
 			}
-			public override void  Collect(int doc, float score)
+			private int base_Renamed = 0;
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
 			{
 				//System.out.println("Doc=" + doc + " score=" + score);
-				Assert.IsTrue(score == (float) doc + 1);
+				Assert.IsTrue(scorer.Score() == (float) doc + base_Renamed + 1);
+			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+				base_Renamed = docBase;
+			}
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
 			}
 		}
-
-		private class AnonymousClassHitCollector2 : HitCollector
+		private class AnonymousClassCollector2:Collector
 		{
-			public AnonymousClassHitCollector2(TestSimilarity enclosingInstance)
+			public AnonymousClassCollector2(TestSimilarity enclosingInstance)
 			{
 				InitBlock(enclosingInstance);
 			}
@@ -109,16 +134,27 @@
 				}
 				
 			}
-			public override void  Collect(int doc, float score)
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
 			{
 				//System.out.println("Doc=" + doc + " score=" + score);
-				Assert.IsTrue(score == 1.0f);
+				Assert.IsTrue(scorer.Score() == 1.0f);
+			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+			}
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
 			}
 		}
-
-		private class AnonymousClassHitCollector3 : HitCollector
+		private class AnonymousClassCollector3:Collector
 		{
-			public AnonymousClassHitCollector3(TestSimilarity enclosingInstance)
+			public AnonymousClassCollector3(TestSimilarity enclosingInstance)
 			{
 				InitBlock(enclosingInstance);
 			}
@@ -135,16 +171,30 @@
 				}
 				
 			}
-			public override void  Collect(int doc, float score)
+			private Scorer scorer;
+			public override void  SetScorer(Scorer scorer)
+			{
+				this.scorer = scorer;
+			}
+			public override void  Collect(int doc)
 			{
 				//System.out.println("Doc=" + doc + " score=" + score);
-				Assert.IsTrue(score == 2.0f);
+				Assert.IsTrue(scorer.Score() == 2.0f);
+			}
+			public override void  SetNextReader(IndexReader reader, int docBase)
+			{
+			}
+			public override bool AcceptsDocsOutOfOrder()
+			{
+				return true;
 			}
 		}
-
+		public TestSimilarity(System.String name):base(name)
+		{
+		}
 		
 		[Serializable]
-		public class SimpleSimilarity : Similarity
+		public class SimpleSimilarity:Similarity
 		{
 			public override float LengthNorm(System.String field, int numTerms)
 			{
@@ -177,16 +227,16 @@
 		}
 		
 		[Test]
-		public virtual void  TestSimilarity_Renamed_Method()
+		public virtual void  TestSimilarity_Renamed()
 		{
 			RAMDirectory store = new RAMDirectory();
 			IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 			writer.SetSimilarity(new SimpleSimilarity());
 			
-			Lucene.Net.Documents.Document d1 = new Lucene.Net.Documents.Document();
+			Document d1 = new Document();
 			d1.Add(new Field("field", "a c", Field.Store.YES, Field.Index.ANALYZED));
 			
-			Lucene.Net.Documents.Document d2 = new Lucene.Net.Documents.Document();
+			Document d2 = new Document();
 			d2.Add(new Field("field", "a b c", Field.Store.YES, Field.Index.ANALYZED));
 			
 			writer.AddDocument(d1);
@@ -201,23 +251,23 @@
 			Term b = new Term("field", "b");
 			Term c = new Term("field", "c");
 			
-			searcher.Search(new TermQuery(b), new AnonymousClassHitCollector(this));
+			searcher.Search(new TermQuery(b), new AnonymousClassCollector(this));
 			
 			BooleanQuery bq = new BooleanQuery();
 			bq.Add(new TermQuery(a), BooleanClause.Occur.SHOULD);
 			bq.Add(new TermQuery(b), BooleanClause.Occur.SHOULD);
 			//System.out.println(bq.toString("field"));
-			searcher.Search(bq, new AnonymousClassHitCollector1(this));
+			searcher.Search(bq, new AnonymousClassCollector1(this));
 			
 			PhraseQuery pq = new PhraseQuery();
-			pq.Add(a);
-			pq.Add(c);
+			pq.add(a);
+			pq.add(c);
 			//System.out.println(pq.toString("field"));
-			searcher.Search(pq, new AnonymousClassHitCollector2(this));
+			searcher.Search(pq, new AnonymousClassCollector2(this));
 			
 			pq.SetSlop(2);
 			//System.out.println(pq.toString("field"));
-			searcher.Search(pq, new AnonymousClassHitCollector3(this));
+			searcher.Search(pq, new AnonymousClassCollector3(this));
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimpleExplanations.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanations.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,12 +19,25 @@
 
 using NUnit.Framework;
 
+using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
+using Document = Lucene.Net.Documents.Document;
+using Field = Lucene.Net.Documents.Field;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using Directory = Lucene.Net.Store.Directory;
+using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
+using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
+using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
+using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
+
 namespace Lucene.Net.Search
 {
 	
+	
 	/// <summary> TestExplanations subclass focusing on basic query types</summary>
-	[TestFixture]
-	public class TestSimpleExplanations : TestExplanations
+    [TestFixture]
+	public class TestSimpleExplanations:TestExplanations
 	{
 		
 		// we focus on queries that don't rewrite to other queries.
@@ -33,13 +46,12 @@
 		
 		
 		/* simple term tests */
-
+		
 		[Test]
 		public virtual void  TestT1()
 		{
 			Qtest("w1", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestT2()
 		{
@@ -53,7 +65,6 @@
 		{
 			Qtest(new MatchAllDocsQuery(), new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestMA2()
 		{
@@ -69,39 +80,33 @@
 		{
 			Qtest("\"w1 w2\"", new int[]{0});
 		}
-
 		[Test]
 		public virtual void  TestP2()
 		{
 			Qtest("\"w1 w3\"", new int[]{1, 3});
 		}
-
 		[Test]
 		public virtual void  TestP3()
 		{
 			Qtest("\"w1 w2\"~1", new int[]{0, 1, 2});
 		}
-
 		[Test]
 		public virtual void  TestP4()
 		{
 			Qtest("\"w2 w3\"~1", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestP5()
 		{
 			Qtest("\"w3 w2\"~1", new int[]{1, 3});
 		}
-
 		[Test]
 		public virtual void  TestP6()
 		{
 			Qtest("\"w3 w2\"~2", new int[]{0, 1, 3});
 		}
-
-		[Test]
-		public virtual void  TestP7()
+        [Test]
+        public virtual void TestP7()
 		{
 			Qtest("\"w3 w2\"~3", new int[]{0, 1, 2, 3});
 		}
@@ -113,25 +118,21 @@
 		{
 			Qtest(new FilteredQuery(qp.Parse("w1"), new ItemizedFilter(new int[]{0, 1, 2, 3})), new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestFQ2()
 		{
 			Qtest(new FilteredQuery(qp.Parse("w1"), new ItemizedFilter(new int[]{0, 2, 3})), new int[]{0, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestFQ3()
 		{
 			Qtest(new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3})), new int[]{3});
 		}
-
 		[Test]
 		public virtual void  TestFQ4()
 		{
 			Qtest(new FilteredQuery(qp.Parse("xx^1000"), new ItemizedFilter(new int[]{1, 3})), new int[]{3});
 		}
-
 		[Test]
 		public virtual void  TestFQ6()
 		{
@@ -139,14 +140,6 @@
 			q.SetBoost(1000);
 			Qtest(q, new int[]{3});
 		}
-
-		[Test]
-		public virtual void  TestFQ7()
-		{
-			Query q = new FilteredQuery(qp.Parse("xx"), new ItemizedFilter(new int[]{1, 3}));
-			q.SetBoost(0);
-			Qtest(q, new int[]{3});
-		}
 		
 		/* ConstantScoreQueries */
 		
@@ -156,14 +149,12 @@
 			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{0, 1, 2, 3}));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestCSQ2()
 		{
 			Query q = new ConstantScoreQuery(new ItemizedFilter(new int[]{1, 3}));
 			Qtest(q, new int[]{1, 3});
 		}
-
 		[Test]
 		public virtual void  TestCSQ3()
 		{
@@ -173,16 +164,15 @@
 		}
 		
 		/* DisjunctionMaxQuery */
-		
-		[Test]
-		public virtual void  TestDMQ1()
+
+        [Test]
+        public virtual void TestDMQ1()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
 			q.Add(qp.Parse("w1"));
 			q.Add(qp.Parse("w5"));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestDMQ2()
 		{
@@ -191,7 +181,6 @@
 			q.Add(qp.Parse("w5"));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestDMQ3()
 		{
@@ -200,7 +189,6 @@
 			q.Add(qp.Parse("w5"));
 			Qtest(q, new int[]{0});
 		}
-
 		[Test]
 		public virtual void  TestDMQ4()
 		{
@@ -209,7 +197,6 @@
 			q.Add(qp.Parse("xx"));
 			Qtest(q, new int[]{2, 3});
 		}
-
 		[Test]
 		public virtual void  TestDMQ5()
 		{
@@ -218,16 +205,14 @@
 			q.Add(qp.Parse("xx"));
 			Qtest(q, new int[]{2, 3});
 		}
-
-		[Test]
-		public virtual void  TestDMQ6()
+        [Test]
+        public virtual void TestDMQ6()
 		{
 			DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
 			q.Add(qp.Parse("-yy w3"));
 			q.Add(qp.Parse("xx"));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestDMQ7()
 		{
@@ -236,7 +221,6 @@
 			q.Add(qp.Parse("w2"));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestDMQ8()
 		{
@@ -245,7 +229,6 @@
 			q.Add(qp.Parse("xx^100000"));
 			Qtest(q, new int[]{0, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestDMQ9()
 		{
@@ -265,7 +248,6 @@
 			q.Add(Ta(new System.String[]{"w2", "w3", "xx"}));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestMPQ2()
 		{
@@ -274,7 +256,6 @@
 			q.Add(Ta(new System.String[]{"w2", "w3"}));
 			Qtest(q, new int[]{0, 1, 3});
 		}
-
 		[Test]
 		public virtual void  TestMPQ3()
 		{
@@ -283,7 +264,6 @@
 			q.Add(Ta(new System.String[]{"w2", "w3"}));
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestMPQ4()
 		{
@@ -292,7 +272,6 @@
 			q.Add(Ta(new System.String[]{"w2"}));
 			Qtest(q, new int[]{0});
 		}
-
 		[Test]
 		public virtual void  TestMPQ5()
 		{
@@ -302,7 +281,6 @@
 			q.SetSlop(1);
 			Qtest(q, new int[]{0, 1, 2});
 		}
-
 		[Test]
 		public virtual void  TestMPQ6()
 		{
@@ -313,74 +291,63 @@
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
 		
-		/* some simple tests of bool queries containing term queries */
+		/* some simple tests of boolean queries containing term queries */
 		
 		[Test]
 		public virtual void  TestBQ1()
 		{
 			Qtest("+w1 +w2", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ2()
 		{
 			Qtest("+yy +w3", new int[]{2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ3()
 		{
 			Qtest("yy +w3", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ4()
 		{
 			Qtest("w1 (-xx w2)", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ5()
 		{
 			Qtest("w1 (+qq w2)", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ6()
 		{
 			Qtest("w1 -(-qq w5)", new int[]{1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ7()
 		{
 			Qtest("+w1 +(qq (xx -w2) (+w3 +w4))", new int[]{0});
 		}
-
 		[Test]
 		public virtual void  TestBQ8()
 		{
 			Qtest("+w1 (qq (xx -w2) (+w3 +w4))", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ9()
 		{
 			Qtest("+w1 (qq (-xx w2) -(+w3 +w4))", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ10()
 		{
 			Qtest("+w1 +(qq (-xx w2) -(+w3 +w4))", new int[]{1});
 		}
-
 		[Test]
 		public virtual void  TestBQ11()
 		{
 			Qtest("w1 w2^1000.0", new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ14()
 		{
@@ -389,7 +356,6 @@
 			q.Add(qp.Parse("w1"), BooleanClause.Occur.SHOULD);
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ15()
 		{
@@ -398,7 +364,6 @@
 			q.Add(qp.Parse("w1"), BooleanClause.Occur.SHOULD);
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ16()
 		{
@@ -407,16 +372,14 @@
 			q.Add(qp.Parse("w1 -xx"), BooleanClause.Occur.SHOULD);
 			Qtest(q, new int[]{0, 1});
 		}
-
-		[Test]
-		public virtual void  TestBQ17()
+        [Test]
+        public virtual void TestBQ17()
 		{
 			BooleanQuery q = new BooleanQuery(true);
 			q.Add(qp.Parse("w2"), BooleanClause.Occur.SHOULD);
 			q.Add(qp.Parse("w1 -xx"), BooleanClause.Occur.SHOULD);
 			Qtest(q, new int[]{0, 1, 2, 3});
 		}
-
 		[Test]
 		public virtual void  TestBQ19()
 		{
@@ -436,5 +399,69 @@
 			
 			Qtest(q, new int[]{0, 3});
 		}
+		
+		
+		[Test]
+		public virtual void  TestTermQueryMultiSearcherExplain()
+		{
+			// creating two directories for indices
+			Directory indexStoreA = new MockRAMDirectory();
+			Directory indexStoreB = new MockRAMDirectory();
+			
+			Document lDoc = new Document();
+			lDoc.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
+			Document lDoc2 = new Document();
+			lDoc2.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
+			Document lDoc3 = new Document();
+			lDoc3.Add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
+			
+			IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+			
+			writerA.AddDocument(lDoc);
+			writerA.AddDocument(lDoc2);
+			writerA.Optimize();
+			writerA.Close();
+			
+			writerB.AddDocument(lDoc3);
+			writerB.Close();
+			
+			QueryParser parser = new QueryParser("fulltext", new StandardAnalyzer());
+			Query query = parser.Parse("handle:1");
+			
+			Searcher[] searchers = new Searcher[2];
+			searchers[0] = new IndexSearcher(indexStoreB);
+			searchers[1] = new IndexSearcher(indexStoreA);
+			Searcher mSearcher = new MultiSearcher(searchers);
+			ScoreDoc[] hits = mSearcher.Search(query, null, 1000).scoreDocs;
+			
+			Assert.AreEqual(3, hits.Length);
+			
+			Explanation explain = mSearcher.Explain(query, hits[0].doc);
+			System.String exp = explain.ToString(0);
+			Assert.IsTrue(exp.IndexOf("maxDocs=3") > - 1, exp);
+			Assert.IsTrue(exp.IndexOf("docFreq=3") > - 1, exp);
+			
+			query = parser.Parse("handle:\"1 2\"");
+			hits = mSearcher.Search(query, null, 1000).scoreDocs;
+			
+			Assert.AreEqual(3, hits.Length);
+			
+			explain = mSearcher.Explain(query, hits[0].doc);
+			exp = explain.ToString(0);
+			Assert.IsTrue(exp.IndexOf("1=3") > - 1, exp);
+			Assert.IsTrue(exp.IndexOf("2=3") > - 1, exp);
+			
+			query = new SpanNearQuery(new SpanQuery[]{new SpanTermQuery(new Term("handle", "1")), new SpanTermQuery(new Term("handle", "2"))}, 0, true);
+			hits = mSearcher.Search(query, null, 1000).scoreDocs;
+			
+			Assert.AreEqual(3, hits.Length);
+			
+			explain = mSearcher.Explain(query, hits[0].doc);
+			exp = explain.ToString(0);
+			Assert.IsTrue(exp.IndexOf("1=3") > - 1, exp);
+			Assert.IsTrue(exp.IndexOf("2=3") > - 1, exp);
+			mSearcher.Close();
+		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSimpleExplanationsOfNonMatches.cs Tue Nov  3 18:06:27 2009
@@ -1,4 +1,4 @@
-/*
+/* 
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,22 +19,21 @@
 
 using NUnit.Framework;
 
-using RAMDirectory = Lucene.Net.Store.RAMDirectory;
-using IndexWriter = Lucene.Net.Index.IndexWriter;
-using IndexReader = Lucene.Net.Index.IndexReader;
-using Term = Lucene.Net.Index.Term;
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
 using Document = Lucene.Net.Documents.Document;
 using Field = Lucene.Net.Documents.Field;
-using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using IndexReader = Lucene.Net.Index.IndexReader;
+using IndexWriter = Lucene.Net.Index.IndexWriter;
+using Term = Lucene.Net.Index.Term;
 using ParseException = Lucene.Net.QueryParsers.ParseException;
+using QueryParser = Lucene.Net.QueryParsers.QueryParser;
+using RAMDirectory = Lucene.Net.Store.RAMDirectory;
 
 namespace Lucene.Net.Search
 {
 	
 	/// <summary> subclass of TestSimpleExplanations that verifies non matches.</summary>
-	[TestFixture]
-	public class TestSimpleExplanationsOfNonMatches : TestSimpleExplanations
+	public class TestSimpleExplanationsOfNonMatches:TestSimpleExplanations
 	{
 		
 		/// <summary> Overrides superclass to ignore matches and focus on non-matches

Modified: incubator/lucene.net/trunk/C#/src/Test/Search/TestSloppyPhraseQuery.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/C%23/src/Test/Search/TestSloppyPhraseQuery.cs?rev=832486&r1=832485&r2=832486&view=diff
==============================================================================
--- incubator/lucene.net/trunk/C#/src/Test/Search/TestSloppyPhraseQuery.cs (original)
+++ incubator/lucene.net/trunk/C#/src/Test/Search/TestSloppyPhraseQuery.cs Tue Nov  3 18:06:27 2009
@@ -1,20 +1,22 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
+using System;
+
 using NUnit.Framework;
 
 using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
@@ -23,155 +25,150 @@
 using IndexWriter = Lucene.Net.Index.IndexWriter;
 using Term = Lucene.Net.Index.Term;
 using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
-using IndexSearcher = Lucene.Net.Search.IndexSearcher;
-using PhraseQuery = Lucene.Net.Search.PhraseQuery;
 using RAMDirectory = Lucene.Net.Store.RAMDirectory;
+using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
 
 namespace Lucene.Net.Search
 {
+	
     [TestFixture]
-    public class TestSloppyPhraseQuery
-    {
-
-        private static readonly string S_1 = "A A A";
-        private static readonly string S_2 = "A 1 2 3 A 4 5 6 A";
-
-        private static readonly Document DOC_1 = makeDocument("X " + S_1 + " Y");
-        private static readonly Document DOC_2 = makeDocument("X " + S_2 + " Y");
-        private static readonly Document DOC_3 = makeDocument("X " + S_1 + " A Y");
-        private static readonly Document DOC_1_B = makeDocument("X " + S_1 + " Y N N N N " + S_1 + " Z");
-        private static readonly Document DOC_2_B = makeDocument("X " + S_2 + " Y N N N N " + S_2 + " Z");
-        private static readonly Document DOC_3_B = makeDocument("X " + S_1 + " A Y N N N N " + S_1 + " A Y");
-        private static readonly Document DOC_4 = makeDocument("A A X A X B A X B B A A X B A A");
-
-        private static readonly PhraseQuery QUERY_1 = makePhraseQuery(S_1);
-        private static readonly PhraseQuery QUERY_2 = makePhraseQuery(S_2);
-        private static readonly PhraseQuery QUERY_4 = makePhraseQuery("X A A");
-
-        /**
-         * Test DOC_4 and QUERY_4.
-         * QUERY_4 has a fuzzy (len=1) match to DOC_4, so all slop values > 0 should succeed.
-         * But only the 3rd sequence of A's in DOC_4 will do.
-         */
-        [Test]
-        public void TestDoc4_Query4_All_Slops_Should_match()
-        {
-            for (int slop = 0; slop < 30; slop++)
-            {
-                int numResultsExpected = slop < 1 ? 0 : 1;
-                checkPhraseQuery(DOC_4, QUERY_4, slop, numResultsExpected);
-            }
-        }
-
-        /**
-         * Test DOC_1 and QUERY_1.
-         * QUERY_1 has an exact match to DOC_1, so all slop values should succeed.
-         * Before LUCENE-1310, a slop value of 1 did not succeed.
-         */
-        [Test]
-        public void TestDoc1_Query1_All_Slops_Should_match()
-        {
-            for (int slop = 0; slop < 30; slop++)
-            {
-                float score1 = checkPhraseQuery(DOC_1, QUERY_1, slop, 1);
-                float score2 = checkPhraseQuery(DOC_1_B, QUERY_1, slop, 1);
-                Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
-            }
-        }
-
-        /**
-         * Test DOC_2 and QUERY_1.
-         * 6 should be the minimum slop to make QUERY_1 match DOC_2.
-         * Before LUCENE-1310, 7 was the minimum.
-         */
-        [Test]
-        public void TestDoc2_Query1_Slop_6_or_more_Should_match()
-        {
-            for (int slop = 0; slop < 30; slop++)
-            {
-                int numResultsExpected = slop < 6 ? 0 : 1;
-                float score1 = checkPhraseQuery(DOC_2, QUERY_1, slop, numResultsExpected);
-                if (numResultsExpected > 0)
-                {
-                    float score2 = checkPhraseQuery(DOC_2_B, QUERY_1, slop, 1);
-                    Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
-                }
-            }
-        }
-
-        /**
-         * Test DOC_2 and QUERY_2.
-         * QUERY_2 has an exact match to DOC_2, so all slop values should succeed.
-         * Before LUCENE-1310, 0 succeeds, 1 through 7 fail, and 8 or greater succeeds.
-         */
-        [Test]
-        public void TestDoc2_Query2_All_Slops_Should_match()
-        {
-            for (int slop = 0; slop < 30; slop++)
-            {
-                float score1 = checkPhraseQuery(DOC_2, QUERY_2, slop, 1);
-                float score2 = checkPhraseQuery(DOC_2_B, QUERY_2, slop, 1);
-                Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
-            }
-        }
-
-        /**
-         * Test DOC_3 and QUERY_1.
-         * QUERY_1 has an exact match to DOC_3, so all slop values should succeed.
-         */
-        [Test]
-        public void TestDoc3_Query1_All_Slops_Should_match()
-        {
-            for (int slop = 0; slop < 30; slop++)
-            {
-                float score1 = checkPhraseQuery(DOC_3, QUERY_1, slop, 1);
-                float score2 = checkPhraseQuery(DOC_3_B, QUERY_1, slop, 1);
-                Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
-            }
-        }
-
-        private float checkPhraseQuery(Document doc, PhraseQuery query, int slop, int expectedNumResults)
-        {
-            query.SetSlop(slop);
-
-            RAMDirectory ramDir = new RAMDirectory();
-            WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
-            IndexWriter writer = new IndexWriter(ramDir, analyzer, MaxFieldLength.UNLIMITED);
-            writer.AddDocument(doc);
-            writer.Close();
-
-            IndexSearcher searcher = new IndexSearcher(ramDir);
-            TopDocs td = searcher.Search(query, null, 10);
-            //System.out.println("slop: "+slop+"  query: "+query+"  doc: "+doc+"  Expecting number of hits: "+expectedNumResults+" maxScore="+td.getMaxScore());
-            Assert.AreEqual(expectedNumResults, td.totalHits, "slop: " + slop + "  query: " + query + "  doc: " + doc + "  Wrong number of hits");
-
-            //QueryUtils.check(query,searcher);
-
-            searcher.Close();
-            ramDir.Close();
-
-            return td.GetMaxScore();
-        }
-
-        private static Document makeDocument(string docText)
-        {
-            Document doc = new Document();
-            Field f = new Field("f", docText, Field.Store.NO, Field.Index.ANALYZED);
-            f.SetOmitNorms(true);
-            doc.Add(f);
-            return doc;
-        }
-
-        private static PhraseQuery makePhraseQuery(string terms)
-        {
-            PhraseQuery query = new PhraseQuery();
-            string[] t = terms.Split(new string[] { " " }, System.StringSplitOptions.RemoveEmptyEntries);
-            for (int i = 0; i < t.Length; i++)
-            {
-                query.Add(new Term("f", t[i]));
-            }
-            return query;
-        }
-
-    }
-}
+	public class TestSloppyPhraseQuery:LuceneTestCase
+	{
+		
+		private const System.String S_1 = "A A A";
+		private const System.String S_2 = "A 1 2 3 A 4 5 6 A";
+		
+		private static readonly Document DOC_1 = MakeDocument("X " + S_1 + " Y");
+		private static readonly Document DOC_2 = MakeDocument("X " + S_2 + " Y");
+		private static readonly Document DOC_3 = MakeDocument("X " + S_1 + " A Y");
+		private static readonly Document DOC_1_B = MakeDocument("X " + S_1 + " Y N N N N " + S_1 + " Z");
+		private static readonly Document DOC_2_B = MakeDocument("X " + S_2 + " Y N N N N " + S_2 + " Z");
+		private static readonly Document DOC_3_B = MakeDocument("X " + S_1 + " A Y N N N N " + S_1 + " A Y");
+		private static readonly Document DOC_4 = MakeDocument("A A X A X B A X B B A A X B A A");
+		
+		private static readonly PhraseQuery QUERY_1 = MakePhraseQuery(S_1);
+		private static readonly PhraseQuery QUERY_2 = MakePhraseQuery(S_2);
+		private static readonly PhraseQuery QUERY_4 = MakePhraseQuery("X A A");
+		
+		
+		/// <summary> Test DOC_4 and QUERY_4.
+		/// QUERY_4 has a fuzzy (len=1) match to DOC_4, so all slop values > 0 should succeed.
+		/// But only the 3rd sequence of A's in DOC_4 will do.
+		/// </summary>
+		[Test]
+		public virtual void  TestDoc4_Query4_All_Slops_Should_match()
+		{
+			for (int slop = 0; slop < 30; slop++)
+			{
+				int numResultsExpected = slop < 1?0:1;
+				CheckPhraseQuery(DOC_4, QUERY_4, slop, numResultsExpected);
+			}
+		}
+		
+		/// <summary> Test DOC_1 and QUERY_1.
+		/// QUERY_1 has an exact match to DOC_1, so all slop values should succeed.
+		/// Before LUCENE-1310, a slop value of 1 did not succeed.
+		/// </summary>
+		[Test]
+		public virtual void  TestDoc1_Query1_All_Slops_Should_match()
+		{
+			for (int slop = 0; slop < 30; slop++)
+			{
+				float score1 = CheckPhraseQuery(DOC_1, QUERY_1, slop, 1);
+				float score2 = CheckPhraseQuery(DOC_1_B, QUERY_1, slop, 1);
+				Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
+			}
+		}
+		
+		/// <summary> Test DOC_2 and QUERY_1.
+		/// 6 should be the minimum slop to make QUERY_1 match DOC_2.
+		/// Before LUCENE-1310, 7 was the minimum.
+		/// </summary>
+		[Test]
+		public virtual void  TestDoc2_Query1_Slop_6_or_more_Should_match()
+		{
+			for (int slop = 0; slop < 30; slop++)
+			{
+				int numResultsExpected = slop < 6?0:1;
+				float score1 = CheckPhraseQuery(DOC_2, QUERY_1, slop, numResultsExpected);
+				if (numResultsExpected > 0)
+				{
+					float score2 = CheckPhraseQuery(DOC_2_B, QUERY_1, slop, 1);
+					Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
+				}
+			}
+		}
+		
+		/// <summary> Test DOC_2 and QUERY_2.
+		/// QUERY_2 has an exact match to DOC_2, so all slop values should succeed.
+		/// Before LUCENE-1310, 0 succeeds, 1 through 7 fail, and 8 or greater succeeds.
+		/// </summary>
+		[Test]
+		public virtual void  TestDoc2_Query2_All_Slops_Should_match()
+		{
+			for (int slop = 0; slop < 30; slop++)
+			{
+				float score1 = CheckPhraseQuery(DOC_2, QUERY_2, slop, 1);
+				float score2 = CheckPhraseQuery(DOC_2_B, QUERY_2, slop, 1);
+				Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
+			}
+		}
+		
+		/// <summary> Test DOC_3 and QUERY_1.
+		/// QUERY_1 has an exact match to DOC_3, so all slop values should succeed.
+		/// </summary>
+		[Test]
+		public virtual void  TestDoc3_Query1_All_Slops_Should_match()
+		{
+			for (int slop = 0; slop < 30; slop++)
+			{
+				float score1 = CheckPhraseQuery(DOC_3, QUERY_1, slop, 1);
+				float score2 = CheckPhraseQuery(DOC_3_B, QUERY_1, slop, 1);
+				Assert.IsTrue(score2 > score1, "slop=" + slop + " score2=" + score2 + " should be greater than score1 " + score1);
+			}
+		}
+		
+		private float CheckPhraseQuery(Document doc, PhraseQuery query, int slop, int expectedNumResults)
+		{
+			query.SetSlop(slop);
+			
+			RAMDirectory ramDir = new RAMDirectory();
+			WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
+			IndexWriter writer = new IndexWriter(ramDir, analyzer, MaxFieldLength.UNLIMITED);
+			writer.AddDocument(doc);
+			writer.Close();
+			
+			IndexSearcher searcher = new IndexSearcher(ramDir);
+			TopDocs td = searcher.Search(query, null, 10);
+			//System.out.println("slop: "+slop+"  query: "+query+"  doc: "+doc+"  Expecting number of hits: "+expectedNumResults+" maxScore="+td.getMaxScore());
+			Assert.AreEqual(expectedNumResults, td.totalHits, "slop: " + slop + "  query: " + query + "  doc: " + doc + "  Wrong number of hits");
+			
+			//QueryUtils.check(query,searcher);
+			
+			searcher.Close();
+			ramDir.Close();
+			
+			return td.GetMaxScore();
+		}
+		
+		private static Document MakeDocument(System.String docText)
+		{
+			Document doc = new Document();
+			Field f = new Field("f", docText, Field.Store.NO, Field.Index.ANALYZED);
+			f.SetOmitNorms(true);
+			doc.Add(f);
+			return doc;
+		}
+		
+		private static PhraseQuery MakePhraseQuery(System.String terms)
+		{
+			PhraseQuery query = new PhraseQuery();
+			System.String[] t = System.Text.RegularExpressions.Regex.Split(terms, " +");
+			for (int i = 0; i < t.Length; i++)
+			{
+				query.add(new Term("f", t[i]));
+			}
+			return query;
+		}
+	}
+}
\ No newline at end of file
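
The converted tests above pivot on PhraseQuery slop: the query matches only when its terms can be aligned with the document within the given number of positional moves, which is why QUERY_1 ("A A A") needs a slop of at least 6 to match DOC_2 ("A 1 2 3 A 4 5 6 A"). A sketch of the pattern, assuming the same Lucene.Net API as the test; the method is hypothetical and would sit alongside CheckPhraseQuery, with searcher being an open IndexSearcher over an index built the same way:

    private static int CountSloppyMatches(IndexSearcher searcher, int slop)
    {
        PhraseQuery query = new PhraseQuery();
        query.Add(new Term("f", "A"));
        query.Add(new Term("f", "A"));
        query.Add(new Term("f", "A"));   // the phrase "A A A" (QUERY_1 above)
        query.SetSlop(slop);             // positional budget for spread-out or reordered terms
        TopDocs td = searcher.Search(query, null, 10);
        return td.totalHits;             // for DOC_2: 0 when slop < 6, 1 when slop >= 6
    }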


