lucenenet-commits mailing list archives

From nightowl...@apache.org
Subject [08/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory
Date Sun, 26 Feb 2017 23:36:56 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs b/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
new file mode 100644
index 0000000..2bbb21c
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestCachingWrapperFilter.cs
@@ -0,0 +1,513 @@
+using System;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Index;
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using IBits = Lucene.Net.Util.IBits;
+    using Directory = Lucene.Net.Store.Directory;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FixedBitSet = Lucene.Net.Util.FixedBitSet;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SerialMergeScheduler = Lucene.Net.Index.SerialMergeScheduler;
+    using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+
+    [TestFixture]
+    public class TestCachingWrapperFilter : LuceneTestCase
+    {
+        internal Directory Dir;
+        internal DirectoryReader Ir;
+        internal IndexSearcher @is;
+        internal RandomIndexWriter Iw;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+            Iw = new RandomIndexWriter(Random(), Dir, Similarity, TimeZone);
+            Document doc = new Document();
+            Field idField = new StringField("id", "", Field.Store.NO);
+            doc.Add(idField);
+            // add 500 docs with id 0..499
+            for (int i = 0; i < 500; i++)
+            {
+                idField.SetStringValue(Convert.ToString(i));
+                Iw.AddDocument(doc);
+            }
+            // delete 20 of them
+            for (int i = 0; i < 20; i++)
+            {
+                Iw.DeleteDocuments(new Term("id", Convert.ToString(Random().Next(Iw.MaxDoc))));
+            }
+            Ir = Iw.Reader;
+            @is = NewSearcher(Ir);
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            IOUtils.Close(Iw, Ir, Dir);
+            base.TearDown();
+        }
+
+        private void AssertFilterEquals(Filter f1, Filter f2)
+        {
+            Query query = new MatchAllDocsQuery();
+            TopDocs hits1 = @is.Search(query, f1, Ir.MaxDoc);
+            TopDocs hits2 = @is.Search(query, f2, Ir.MaxDoc);
+            Assert.AreEqual(hits1.TotalHits, hits2.TotalHits);
+            CheckHits.CheckEqual(query, hits1.ScoreDocs, hits2.ScoreDocs);
+            // now do it again to confirm caching works
+            TopDocs hits3 = @is.Search(query, f1, Ir.MaxDoc);
+            TopDocs hits4 = @is.Search(query, f2, Ir.MaxDoc);
+            Assert.AreEqual(hits3.TotalHits, hits4.TotalHits);
+            CheckHits.CheckEqual(query, hits3.ScoreDocs, hits4.ScoreDocs);
+        }
+
+        /// <summary>
+        /// test null iterator </summary>
+        [Test]
+        public virtual void TestEmpty()
+        {
+            Query query = new BooleanQuery();
+            Filter expected = new QueryWrapperFilter(query);
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        /// <summary>
+        /// test iterator returns NO_MORE_DOCS </summary>
+        [Test]
+        public virtual void TestEmpty2()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term("id", "0")), Occur.MUST);
+            query.Add(new TermQuery(new Term("id", "0")), Occur.MUST_NOT);
+            Filter expected = new QueryWrapperFilter(query);
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        /// <summary>
+        /// test null docidset </summary>
+        [Test]
+        public virtual void TestEmpty3()
+        {
+            Filter expected = new PrefixFilter(new Term("bogusField", "bogusVal"));
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        /// <summary>
+        /// test iterator returns single document </summary>
+        [Test]
+        public virtual void TestSingle()
+        {
+            for (int i = 0; i < 10; i++)
+            {
+                int id = Random().Next(Ir.MaxDoc);
+                Query query = new TermQuery(new Term("id", Convert.ToString(id)));
+                Filter expected = new QueryWrapperFilter(query);
+                Filter actual = new CachingWrapperFilter(expected);
+                AssertFilterEquals(expected, actual);
+            }
+        }
+
+        /// <summary>
+        /// test sparse filters (match single documents) </summary>
+        [Test]
+        public virtual void TestSparse()
+        {
+            for (int i = 0; i < 10; i++)
+            {
+                int id_start = Random().Next(Ir.MaxDoc - 1);
+                int id_end = id_start + 1;
+                Query query = TermRangeQuery.NewStringRange("id", Convert.ToString(id_start), Convert.ToString(id_end), true, true);
+                Filter expected = new QueryWrapperFilter(query);
+                Filter actual = new CachingWrapperFilter(expected);
+                AssertFilterEquals(expected, actual);
+            }
+        }
+
+        /// <summary>
+        /// test dense filters (match entire index) </summary>
+        [Test]
+        public virtual void TestDense()
+        {
+            Query query = new MatchAllDocsQuery();
+            Filter expected = new QueryWrapperFilter(query);
+            Filter actual = new CachingWrapperFilter(expected);
+            AssertFilterEquals(expected, actual);
+        }
+
+        [Test]
+        public virtual void TestCachingWorks()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+            MockFilter filter = new MockFilter();
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+
+            // first time, nested filter is called
+            DocIdSet strongRef = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            Assert.IsTrue(filter.WasCalled(), "first time");
+
+            // make sure no exception if cache is holding the wrong docIdSet
+            cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+
+            // second time, nested filter should not be called
+            filter.Clear();
+            cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            Assert.IsFalse(filter.WasCalled(), "second time");
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestNullDocIdSet()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+
+            Filter filter = new FilterAnonymousInnerClassHelper(this, context);
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+
+            // the caching filter should return the empty set constant
+            //Assert.IsNull(cacher.GetDocIdSet(context, "second time", (context.AtomicReader).LiveDocs));
+            Assert.IsNull(cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper : Filter
+        {
+            private readonly TestCachingWrapperFilter OuterInstance;
+
+            private AtomicReaderContext Context;
+
+            public FilterAnonymousInnerClassHelper(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
+            {
+                this.OuterInstance = outerInstance;
+                this.Context = context;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return null;
+            }
+        }
+
+        [Test]
+        public virtual void TestNullDocIdSetIterator()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+
+            Filter filter = new FilterAnonymousInnerClassHelper2(this, context);
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+
+            // the caching filter should return the empty set constant
+            Assert.IsNull(cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs));
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper2 : Filter
+        {
+            private readonly TestCachingWrapperFilter OuterInstance;
+
+            private AtomicReaderContext Context;
+
+            public FilterAnonymousInnerClassHelper2(TestCachingWrapperFilter outerInstance, AtomicReaderContext context)
+            {
+                this.OuterInstance = outerInstance;
+                this.Context = context;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return new DocIdSetAnonymousInnerClassHelper(this);
+            }
+
+            private class DocIdSetAnonymousInnerClassHelper : DocIdSet
+            {
+                private readonly FilterAnonymousInnerClassHelper2 OuterInstance;
+
+                public DocIdSetAnonymousInnerClassHelper(FilterAnonymousInnerClassHelper2 outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override DocIdSetIterator GetIterator()
+                {
+                    return null;
+                }
+            }
+        }
+
+        private static void AssertDocIdSetCacheable(IndexReader reader, Filter filter, bool shouldCacheable)
+        {
+            Assert.IsTrue(reader.Context is AtomicReaderContext);
+            AtomicReaderContext context = (AtomicReaderContext)reader.Context;
+            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
+            DocIdSet originalSet = filter.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            DocIdSet cachedSet = cacher.GetDocIdSet(context, (context.AtomicReader).LiveDocs);
+            if (originalSet == null)
+            {
+                Assert.IsNull(cachedSet);
+            }
+            if (cachedSet == null)
+            {
+                Assert.IsTrue(originalSet == null || originalSet.GetIterator() == null);
+            }
+            else
+            {
+                Assert.IsTrue(cachedSet.IsCacheable);
+                Assert.AreEqual(shouldCacheable, originalSet.IsCacheable);
+                //System.out.println("Original: "+originalSet.getClass().getName()+" -- cached: "+cachedSet.getClass().getName());
+                if (originalSet.IsCacheable)
+                {
+                    Assert.AreEqual(originalSet.GetType(), cachedSet.GetType(), "Cached DocIdSet must be of same class like uncached, if cacheable");
+                }
+                else
+                {
+                    Assert.IsTrue(cachedSet is FixedBitSet || cachedSet == null, "Cached DocIdSet must be an FixedBitSet if the original one was not cacheable");
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestIsCacheAble()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            writer.AddDocument(new Document());
+            writer.Dispose();
+
+            IndexReader reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
+
+            // not cacheable:
+            AssertDocIdSetCacheable(reader, new QueryWrapperFilter(new TermQuery(new Term("test", "value"))), false);
+            // returns default empty docidset, always cacheable:
+            AssertDocIdSetCacheable(reader, NumericRangeFilter.NewInt32Range("test", Convert.ToInt32(10000), Convert.ToInt32(-10000), true, true), true);
+            // is cacheable:
+            AssertDocIdSetCacheable(reader, FieldCacheRangeFilter.NewInt32Range("test", Convert.ToInt32(10), Convert.ToInt32(20), true, true), true);
+            // a fixedbitset filter is always cacheable
+            AssertDocIdSetCacheable(reader, new FilterAnonymousInnerClassHelper3(this), true);
+
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        private class FilterAnonymousInnerClassHelper3 : Filter
+        {
+            private readonly TestCachingWrapperFilter OuterInstance;
+
+            public FilterAnonymousInnerClassHelper3(TestCachingWrapperFilter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override DocIdSet GetDocIdSet(AtomicReaderContext context, IBits acceptDocs)
+            {
+                return new FixedBitSet(context.Reader.MaxDoc);
+            }
+        }
+
+        [Test]
+        public virtual void TestEnforceDeletions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(NewLogMergePolicy(10)));
+            // asserts below require no unexpected merges:
+
+            // NOTE: cannot use writer.getReader because RIW (on
+            // flipping a coin) may give us a newly opened reader,
+            // but we use .reopen on this reader below and expect to
+            // (must) get an NRT reader:
+            DirectoryReader reader = DirectoryReader.Open(writer.w, true);
+            // same reason we don't wrap?
+            IndexSearcher searcher = NewSearcher(reader, false, Similarity);
+
+            // add a doc, refresh the reader, and check that it's there
+            Document doc = new Document();
+            doc.Add(NewStringField("id", "1", Field.Store.YES));
+            writer.AddDocument(doc);
+
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            TopDocs docs = searcher.Search(new MatchAllDocsQuery(), 1);
+            Assert.AreEqual(1, docs.TotalHits, "Should find a hit...");
+
+            Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));
+
+            CachingWrapperFilter filter = new CachingWrapperFilter(startFilter);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.IsTrue(filter.SizeInBytes() > 0);
+
+            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
+
+            Query constantScore = new ConstantScoreQuery(filter);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
+
+            // make sure we get a cache hit when we reopen reader
+            // that had no change to deletions
+
+            // fake delete (deletes nothing):
+            writer.DeleteDocuments(new Term("foo", "bar"));
+
+            IndexReader oldReader = reader;
+            reader = RefreshReader(reader);
+            Assert.IsTrue(reader == oldReader);
+            int missCount = filter.missCount;
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
+
+            // cache hit:
+            Assert.AreEqual(missCount, filter.missCount);
+
+            // now delete the doc, refresh the reader, and see that it's not there
+            writer.DeleteDocuments(new Term("id", "1"));
+
+            // NOTE: important to hold ref here so GC doesn't clear
+            // the cache entry!  Else the assert below may sometimes
+            // fail:
+            oldReader = reader;
+            reader = RefreshReader(reader);
+
+            searcher = NewSearcher(reader, false, Similarity);
+
+            missCount = filter.missCount;
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
+
+            // cache hit
+            Assert.AreEqual(missCount, filter.missCount);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
+
+            // apply deletes dynamically:
+            filter = new CachingWrapperFilter(startFilter);
+            writer.AddDocument(doc);
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[query + filter] Should find a hit...");
+            missCount = filter.missCount;
+            Assert.IsTrue(missCount > 0);
+            constantScore = new ConstantScoreQuery(filter);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(1, docs.TotalHits, "[just filter] Should find a hit...");
+            Assert.AreEqual(missCount, filter.missCount);
+
+            writer.AddDocument(doc);
+
+            // NOTE: important to hold ref here so GC doesn't clear
+            // the cache entry!  Else the assert below may sometimes
+            // fail:
+            oldReader = reader;
+
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(2, docs.TotalHits, "[query + filter] Should find 2 hits...");
+            Assert.IsTrue(filter.missCount > missCount);
+            missCount = filter.missCount;
+
+            constantScore = new ConstantScoreQuery(filter);
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(2, docs.TotalHits, "[just filter] Should find a hit...");
+            Assert.AreEqual(missCount, filter.missCount);
+
+            // now delete the doc, refresh the reader, and see that it's not there
+            writer.DeleteDocuments(new Term("id", "1"));
+
+            reader = RefreshReader(reader);
+            searcher = NewSearcher(reader, false, Similarity);
+
+            docs = searcher.Search(new MatchAllDocsQuery(), filter, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[query + filter] Should *not* find a hit...");
+            // CWF reused the same entry (it dynamically applied the deletes):
+            Assert.AreEqual(missCount, filter.missCount);
+
+            docs = searcher.Search(constantScore, 1);
+            Assert.AreEqual(0, docs.TotalHits, "[just filter] Should *not* find a hit...");
+            // CWF reused the same entry (it dynamically applied the deletes):
+            Assert.AreEqual(missCount, filter.missCount);
+
+            // NOTE: silliness to make sure JRE does not eliminate
+            // our holding onto oldReader to prevent
+            // CachingWrapperFilter's WeakHashMap from dropping the
+            // entry:
+            Assert.IsTrue(oldReader != null);
+
+            reader.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        private static DirectoryReader RefreshReader(DirectoryReader reader)
+        {
+            DirectoryReader oldReader = reader;
+            reader = DirectoryReader.OpenIfChanged(reader);
+            if (reader != null)
+            {
+                oldReader.Dispose();
+                return reader;
+            }
+            else
+            {
+                return oldReader;
+            }
+        }
+    }
+}
\ No newline at end of file
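
For readers skimming the diff, the caching pattern these tests exercise is simply wrapping any Filter in a CachingWrapperFilter and searching with it more than once. The sketch below is illustrative only; it assumes an already-open IndexSearcher (searcher) and DirectoryReader (reader) and uses only calls that appear in the test above.

    // Wrap an ordinary filter so its per-segment DocIdSet is cached after first use.
    Filter uncached = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));
    Filter cached = new CachingWrapperFilter(uncached);

    // The first search populates the cache; the second reuses the cached DocIdSet,
    // which is what AssertFilterEquals checks by running each search twice.
    TopDocs first = searcher.Search(new MatchAllDocsQuery(), cached, reader.MaxDoc);
    TopDocs second = searcher.Search(new MatchAllDocsQuery(), cached, reader.MaxDoc);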

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs b/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
new file mode 100644
index 0000000..4408fa9
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestComplexExplanations.cs
@@ -0,0 +1,389 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+    using Lucene.Net.Search.Spans;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// TestExplanations subclass that builds up super crazy complex queries
+    /// on the assumption that if the explanations work out right for them,
+    /// they should work for anything.
+    /// </summary>
+    [TestFixture]
+    public class TestComplexExplanations : TestExplanations
+    {
+        /// <summary>
+        /// Override the Similarity used in our searcher with one that plays
+        /// nice with boosts of 0.0
+        /// </summary>
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Searcher.Similarity = CreateQnorm1Similarity();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Searcher.Similarity = IndexSearcher.DefaultSimilarity;
+            base.TearDown();
+        }
+
+        // must be static for weight serialization tests
+        private static DefaultSimilarity CreateQnorm1Similarity()
+        {
+            return new DefaultSimilarityAnonymousInnerClassHelper();
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            public DefaultSimilarityAnonymousInnerClassHelper()
+            {
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1.0f; // / (float) Math.sqrt(1.0f + sumOfSquaredWeights);
+            }
+        }
+
+        [Test]
+        public virtual void Test1()
+        {
+            BooleanQuery q = new BooleanQuery();
+
+            PhraseQuery phraseQuery = new PhraseQuery();
+            phraseQuery.Slop = 1;
+            phraseQuery.Add(new Term(FIELD, "w1"));
+            phraseQuery.Add(new Term(FIELD, "w2"));
+            q.Add(phraseQuery, Occur.MUST);
+            q.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true), Occur.SHOULD);
+            q.Add(Snear(Sf("w3", 2), St("w2"), St("w3"), 5, true), Occur.SHOULD);
+
+            Query t = new FilteredQuery(new TermQuery(new Term(FIELD, "xx")), new ItemizedFilter(new int[] { 1, 3 }));
+            t.Boost = 1000;
+            q.Add(t, Occur.SHOULD);
+
+            t = new ConstantScoreQuery(new ItemizedFilter(new int[] { 0, 2 }));
+            t.Boost = 30;
+            q.Add(t, Occur.SHOULD);
+
+            DisjunctionMaxQuery dm = new DisjunctionMaxQuery(0.2f);
+            dm.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true));
+            dm.Add(new TermQuery(new Term(FIELD, "QQ")));
+
+            BooleanQuery xxYYZZ = new BooleanQuery();
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "xx")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "zz")), Occur.MUST_NOT);
+
+            dm.Add(xxYYZZ);
+
+            BooleanQuery xxW1 = new BooleanQuery();
+            xxW1.Add(new TermQuery(new Term(FIELD, "xx")), Occur.MUST_NOT);
+            xxW1.Add(new TermQuery(new Term(FIELD, "w1")), Occur.MUST_NOT);
+
+            dm.Add(xxW1);
+
+            DisjunctionMaxQuery dm2 = new DisjunctionMaxQuery(0.5f);
+            dm2.Add(new TermQuery(new Term(FIELD, "w1")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w2")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w3")));
+            dm.Add(dm2);
+
+            q.Add(dm, Occur.SHOULD);
+
+            BooleanQuery b = new BooleanQuery();
+            b.MinimumNumberShouldMatch = 2;
+            b.Add(Snear("w1", "w2", 1, true), Occur.SHOULD);
+            b.Add(Snear("w2", "w3", 1, true), Occur.SHOULD);
+            b.Add(Snear("w1", "w3", 3, true), Occur.SHOULD);
+
+            q.Add(b, Occur.SHOULD);
+
+            Qtest(q, new int[] { 0, 1, 2 });
+        }
+
+        [Test]
+        public virtual void Test2()
+        {
+            BooleanQuery q = new BooleanQuery();
+
+            PhraseQuery phraseQuery = new PhraseQuery();
+            phraseQuery.Slop = 1;
+            phraseQuery.Add(new Term(FIELD, "w1"));
+            phraseQuery.Add(new Term(FIELD, "w2"));
+            q.Add(phraseQuery, Occur.MUST);
+            q.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true), Occur.SHOULD);
+            q.Add(Snear(Sf("w3", 2), St("w2"), St("w3"), 5, true), Occur.SHOULD);
+
+            Query t = new FilteredQuery(new TermQuery(new Term(FIELD, "xx")), new ItemizedFilter(new int[] { 1, 3 }));
+            t.Boost = 1000;
+            q.Add(t, Occur.SHOULD);
+
+            t = new ConstantScoreQuery(new ItemizedFilter(new int[] { 0, 2 }));
+            t.Boost = -20.0f;
+            q.Add(t, Occur.SHOULD);
+
+            DisjunctionMaxQuery dm = new DisjunctionMaxQuery(0.2f);
+            dm.Add(Snear(St("w2"), Sor("w5", "zz"), 4, true));
+            dm.Add(new TermQuery(new Term(FIELD, "QQ")));
+
+            BooleanQuery xxYYZZ = new BooleanQuery();
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "xx")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
+            xxYYZZ.Add(new TermQuery(new Term(FIELD, "zz")), Occur.MUST_NOT);
+
+            dm.Add(xxYYZZ);
+
+            BooleanQuery xxW1 = new BooleanQuery();
+            xxW1.Add(new TermQuery(new Term(FIELD, "xx")), Occur.MUST_NOT);
+            xxW1.Add(new TermQuery(new Term(FIELD, "w1")), Occur.MUST_NOT);
+
+            dm.Add(xxW1);
+
+            DisjunctionMaxQuery dm2 = new DisjunctionMaxQuery(0.5f);
+            dm2.Add(new TermQuery(new Term(FIELD, "w1")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w2")));
+            dm2.Add(new TermQuery(new Term(FIELD, "w3")));
+            dm.Add(dm2);
+
+            q.Add(dm, Occur.SHOULD);
+
+            BooleanQuery b = new BooleanQuery();
+            b.MinimumNumberShouldMatch = 2;
+            b.Add(Snear("w1", "w2", 1, true), Occur.SHOULD);
+            b.Add(Snear("w2", "w3", 1, true), Occur.SHOULD);
+            b.Add(Snear("w1", "w3", 3, true), Occur.SHOULD);
+            b.Boost = 0.0f;
+
+            q.Add(b, Occur.SHOULD);
+
+            Qtest(q, new int[] { 0, 1, 2 });
+        }
+
+        // :TODO: we really need more crazy complex cases.
+
+        // //////////////////////////////////////////////////////////////////
+
+        // The rest of these aren't that complex, but they are <i>somewhat</i>
+        // complex, and they expose weakness in dealing with queries that match
+        // with scores of 0 wrapped in other queries
+
+        [Test]
+        public virtual void TestT3()
+        {
+            TermQuery query = new TermQuery(new Term(FIELD, "w1"));
+            query.Boost = 0;
+            Bqtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestMA3()
+        {
+            Query q = new MatchAllDocsQuery();
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestFQ5()
+        {
+            TermQuery query = new TermQuery(new Term(FIELD, "xx"));
+            query.Boost = 0;
+            Bqtest(new FilteredQuery(query, new ItemizedFilter(new int[] { 1, 3 })), new int[] { 3 });
+        }
+
+        [Test]
+        public virtual void TestCSQ4()
+        {
+            Query q = new ConstantScoreQuery(new ItemizedFilter(new int[] { 3 }));
+            q.Boost = 0;
+            Bqtest(q, new int[] { 3 });
+        }
+
+        [Test]
+        public virtual void TestDMQ10()
+        {
+            DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.5f);
+
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
+            boostedQuery.Boost = 100;
+            query.Add(boostedQuery, Occur.SHOULD);
+
+            q.Add(query);
+
+            TermQuery xxBoostedQuery = new TermQuery(new Term(FIELD, "xx"));
+            xxBoostedQuery.Boost = 0;
+
+            q.Add(xxBoostedQuery);
+            q.Boost = 0.0f;
+            Bqtest(q, new int[] { 0, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestMPQ7()
+        {
+            MultiPhraseQuery q = new MultiPhraseQuery();
+            q.Add(Ta(new string[] { "w1" }));
+            q.Add(Ta(new string[] { "w2" }));
+            q.Slop = 1;
+            q.Boost = 0.0f;
+            Bqtest(q, new int[] { 0, 1, 2 });
+        }
+
+        [Test]
+        public virtual void TestBQ12()
+        {
+            // NOTE: using qtest not bqtest
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "w1")), Occur.SHOULD);
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w2"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.SHOULD);
+
+            Qtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ13()
+        {
+            // NOTE: using qtest not bqtest
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "w1")), Occur.SHOULD);
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.MUST_NOT);
+
+            Qtest(query, new int[] { 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ18()
+        {
+            // NOTE: using qtest not bqtest
+            BooleanQuery query = new BooleanQuery();
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w1"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.MUST);
+            query.Add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
+
+            Qtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ21()
+        {
+            BooleanQuery query = new BooleanQuery();
+            query.Add(new TermQuery(new Term(FIELD, "w1")), Occur.MUST);
+            query.Add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
+            query.Boost = 0;
+
+            Bqtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestBQ22()
+        {
+            BooleanQuery query = new BooleanQuery();
+            TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w1"));
+            boostedQuery.Boost = 0;
+            query.Add(boostedQuery, Occur.MUST);
+            query.Add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
+            query.Boost = 0;
+
+            Bqtest(query, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestST3()
+        {
+            SpanQuery q = St("w1");
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestST6()
+        {
+            SpanQuery q = St("xx");
+            q.Boost = 0;
+            Qtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSF3()
+        {
+            SpanQuery q = Sf(("w1"), 1);
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSF7()
+        {
+            SpanQuery q = Sf(("xx"), 3);
+            q.Boost = 0;
+            Bqtest(q, new int[] { 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot3()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("QQ"));
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot6()
+        {
+            SpanQuery q = Snot(Sf("w1", 10), St("xx"));
+            q.Boost = 0;
+            Bqtest(q, new int[] { 0, 1, 2, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot8()
+        {
+            // NOTE: using qtest not bqtest
+            SpanQuery f = Snear("w1", "w3", 10, true);
+            f.Boost = 0;
+            SpanQuery q = Snot(f, St("xx"));
+            Qtest(q, new int[] { 0, 1, 3 });
+        }
+
+        [Test]
+        public virtual void TestSNot9()
+        {
+            // NOTE: using qtest not bqtest
+            SpanQuery t = St("xx");
+            t.Boost = 0;
+            SpanQuery q = Snot(Snear("w1", "w3", 10, true), t);
+            Qtest(q, new int[] { 0, 1, 3 });
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs b/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs
new file mode 100644
index 0000000..18a2759
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestComplexExplanationsOfNonMatches.cs
@@ -0,0 +1,197 @@
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    /// <summary>
+    /// Subclass of TestComplexExplanations that verifies non-matches.
+    /// </summary>
+    [TestFixture]
+    public class TestComplexExplanationsOfNonMatches : TestComplexExplanations
+    {
+        /// <summary>
+        /// Overrides superclass to ignore matches and focus on non-matches
+        /// </summary>
+        /// <seealso cref="CheckHits.CheckNoMatchExplanations"/>
+        public override void Qtest(Query q, int[] expDocNrs)
+        {
+            CheckHits.CheckNoMatchExplanations(q, FIELD, Searcher, expDocNrs);
+        }
+
+
+        #region TestComplexExplanations
+        // LUCENENET NOTE: Tests in a base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+        [Test]
+        public override void Test1()
+        {
+            base.Test1();
+        }
+
+        [Test]
+        public override void Test2()
+        {
+            base.Test2();
+        }
+
+        // :TODO: we really need more crazy complex cases.
+
+        // //////////////////////////////////////////////////////////////////
+
+        // The rest of these aren't that complex, but they are <i>somewhat</i>
+        // complex, and they expose weakness in dealing with queries that match
+        // with scores of 0 wrapped in other queries
+
+        [Test]
+        public override void TestT3()
+        {
+            base.TestT3();
+        }
+
+        [Test]
+        public override void TestMA3()
+        {
+            base.TestMA3();
+        }
+
+        [Test]
+        public override void TestFQ5()
+        {
+            base.TestFQ5();
+        }
+
+        [Test]
+        public override void TestCSQ4()
+        {
+            base.TestCSQ4();
+        }
+
+        [Test]
+        public override void TestDMQ10()
+        {
+            base.TestDMQ10();
+        }
+
+        [Test]
+        public override void TestMPQ7()
+        {
+            base.TestMPQ7();
+        }
+
+        [Test]
+        public override void TestBQ12()
+        {
+            base.TestBQ12();
+        }
+
+        [Test]
+        public override void TestBQ13()
+        {
+            base.TestBQ13();
+        }
+
+        [Test]
+        public override void TestBQ18()
+        {
+            base.TestBQ18();
+        }
+
+        [Test]
+        public override void TestBQ21()
+        {
+            base.TestBQ21();
+        }
+
+        [Test]
+        public override void TestBQ22()
+        {
+            base.TestBQ22();
+        }
+
+        [Test]
+        public override void TestST3()
+        {
+            base.TestST3();
+        }
+
+        [Test]
+        public override void TestST6()
+        {
+            base.TestST6();
+        }
+
+        [Test]
+        public override void TestSF3()
+        {
+            base.TestSF3();
+        }
+
+        [Test]
+        public override void TestSF7()
+        {
+            base.TestSF7();
+        }
+
+        [Test]
+        public override void TestSNot3()
+        {
+            base.TestSNot3();
+        }
+
+        [Test]
+        public override void TestSNot6()
+        {
+            base.TestSNot6();
+        }
+
+        [Test]
+        public override void TestSNot8()
+        {
+            base.TestSNot8();
+        }
+
+        [Test]
+        public override void TestSNot9()
+        {
+            base.TestSNot9();
+        }
+
+        #endregion
+
+        #region TestExplanations
+        // LUCENENET NOTE: Tests in an abstract base class are not pulled into the correct
+        // context in Visual Studio. This fixes that with the minimum amount of code necessary
+        // to run them in the correct context without duplicating all of the tests.
+
+
+        /// <summary>
+        /// Placeholder: JUnit freaks if you don't have one test ... making
+        /// class abstract doesn't help
+        /// </summary>
+        [Test]
+        public override void TestNoop()
+        {
+            base.TestNoop();
+        }
+
+        #endregion
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestConjunctions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestConjunctions.cs b/src/Lucene.Net.Tests/Search/TestConjunctions.cs
new file mode 100644
index 0000000..2f5cdc2
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestConjunctions.cs
@@ -0,0 +1,161 @@
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using FieldInvertState = Lucene.Net.Index.FieldInvertState;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Similarity = Lucene.Net.Search.Similarities.Similarity;
+    using Store = Field.Store;
+    using StringField = StringField;
+    using Term = Lucene.Net.Index.Term;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestConjunctions : LuceneTestCase
+    {
+        internal Analyzer Analyzer;
+        internal Directory Dir;
+        internal IndexReader Reader;
+        internal IndexSearcher Searcher;
+
+        internal const string F1 = "title";
+        internal const string F2 = "body";
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Analyzer = new MockAnalyzer(Random());
+            Dir = NewDirectory();
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, Analyzer);
+            config.SetMergePolicy(NewLogMergePolicy()); // we will use docids to validate
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), Dir, config);
+            writer.AddDocument(Doc("lucene", "lucene is a very popular search engine library"));
+            writer.AddDocument(Doc("solr", "solr is a very popular search server and is using lucene"));
+            writer.AddDocument(Doc("nutch", "nutch is an internet search engine with web crawler and is using lucene and hadoop"));
+            Reader = writer.Reader;
+            writer.Dispose();
+            Searcher = NewSearcher(Reader);
+            Searcher.Similarity = new TFSimilarity();
+        }
+
+        internal static Document Doc(string v1, string v2)
+        {
+            Document doc = new Document();
+            doc.Add(new StringField(F1, v1, Store.YES));
+            doc.Add(new TextField(F2, v2, Store.YES));
+            return doc;
+        }
+
+        [Test]
+        public virtual void TestTermConjunctionsWithOmitTF()
+        {
+            BooleanQuery bq = new BooleanQuery();
+            bq.Add(new TermQuery(new Term(F1, "nutch")), Occur.MUST);
+            bq.Add(new TermQuery(new Term(F2, "is")), Occur.MUST);
+            TopDocs td = Searcher.Search(bq, 3);
+            Assert.AreEqual(1, td.TotalHits);
+            Assert.AreEqual(3F, td.ScoreDocs[0].Score, 0.001F); // f1:nutch + f2:is + f2:is
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Reader.Dispose();
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        // Similarity that returns the TF as score
+        private class TFSimilarity : Similarity
+        {
+            public override long ComputeNorm(FieldInvertState state)
+            {
+                return 1; // we don't care
+            }
+
+            public override SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
+            {
+                return new SimWeightAnonymousInnerClassHelper(this);
+            }
+
+            private class SimWeightAnonymousInnerClassHelper : SimWeight
+            {
+                private readonly TFSimilarity OuterInstance;
+
+                public SimWeightAnonymousInnerClassHelper(TFSimilarity outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override float GetValueForNormalization()
+                {
+                    return 1; // we don't care
+                }
+
+                public override void Normalize(float queryNorm, float topLevelBoost)
+                {
+                    // we don't care
+                }
+            }
+
+            public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext context)
+            {
+                return new SimScorerAnonymousInnerClassHelper(this);
+            }
+
+            private class SimScorerAnonymousInnerClassHelper : SimScorer
+            {
+                private readonly TFSimilarity OuterInstance;
+
+                public SimScorerAnonymousInnerClassHelper(TFSimilarity outerInstance)
+                {
+                    this.OuterInstance = outerInstance;
+                }
+
+                public override float Score(int doc, float freq)
+                {
+                    return freq;
+                }
+
+                public override float ComputeSlopFactor(int distance)
+                {
+                    return 1F;
+                }
+
+                public override float ComputePayloadFactor(int doc, int start, int end, BytesRef payload)
+                {
+                    return 1F;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
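
As a quick sanity check of the 3F expected in TestTermConjunctionsWithOmitTF: the TFSimilarity above scores each term by its raw frequency and a conjunction sums its clauses, so for the "nutch" document the score is tf(title:nutch) + tf(body:is) = 1 + 2 = 3, matching the assertion within its 0.001 tolerance.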

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
new file mode 100644
index 0000000..e458c63
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestConstantScoreQuery.cs
@@ -0,0 +1,241 @@
+using System.Diagnostics;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Search
+{
+    using NUnit.Framework;
+    using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext;
+    using DefaultSimilarity = Lucene.Net.Search.Similarities.DefaultSimilarity;
+    using Directory = Lucene.Net.Store.Directory;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Document = Documents.Document;
+    using Field = Field;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using Term = Lucene.Net.Index.Term;
+
+    /// <summary>
+    /// This class only tests some basic functionality of CSQ; the main parts are mostly
+    /// covered by the MultiTermQuery tests, and explanations appear to be covered by TestExplanations.
+    /// </summary>
+    [TestFixture]
+    public class TestConstantScoreQuery : LuceneTestCase
+    {
+        [Test]
+        public virtual void TestCSQ()
+        {
+            Query q1 = new ConstantScoreQuery(new TermQuery(new Term("a", "b")));
+            Query q2 = new ConstantScoreQuery(new TermQuery(new Term("a", "c")));
+            Query q3 = new ConstantScoreQuery(TermRangeFilter.NewStringRange("a", "b", "c", true, true));
+            QueryUtils.Check(q1);
+            QueryUtils.Check(q2);
+            QueryUtils.CheckEqual(q1, q1);
+            QueryUtils.CheckEqual(q2, q2);
+            QueryUtils.CheckEqual(q3, q3);
+            QueryUtils.CheckUnequal(q1, q2);
+            QueryUtils.CheckUnequal(q2, q3);
+            QueryUtils.CheckUnequal(q1, q3);
+            QueryUtils.CheckUnequal(q1, new TermQuery(new Term("a", "b")));
+        }
+
+        private void CheckHits(IndexSearcher searcher, Query q, float expectedScore, string scorerClassName, string innerScorerClassName)
+        {
+            int[] count = new int[1];
+            searcher.Search(q, new CollectorAnonymousInnerClassHelper(this, expectedScore, scorerClassName, innerScorerClassName, count));
+            Assert.AreEqual(1, count[0], "invalid number of results");
+        }
+
+        private class CollectorAnonymousInnerClassHelper : ICollector
+        {
+            private readonly TestConstantScoreQuery OuterInstance;
+
+            private float ExpectedScore;
+            private string ScorerClassName;
+            private string InnerScorerClassName;
+            private int[] Count;
+
+            public CollectorAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance, float expectedScore, string scorerClassName, string innerScorerClassName, int[] count)
+            {
+                this.OuterInstance = outerInstance;
+                this.ExpectedScore = expectedScore;
+                this.ScorerClassName = scorerClassName;
+                this.InnerScorerClassName = innerScorerClassName;
+                this.Count = count;
+            }
+
+            private Scorer scorer;
+
+            public virtual void SetScorer(Scorer scorer)
+            {
+                this.scorer = scorer;
+                Assert.AreEqual(ScorerClassName, scorer.GetType().Name, "Scorer is implemented by wrong class");
+                if (InnerScorerClassName != null && scorer is ConstantScoreQuery.ConstantScorer)
+                {
+                    ConstantScoreQuery.ConstantScorer innerScorer = (ConstantScoreQuery.ConstantScorer)scorer;
+                    Assert.AreEqual(InnerScorerClassName, innerScorer.docIdSetIterator.GetType().Name, "inner Scorer is implemented by wrong class");
+                }
+            }
+
+            public virtual void Collect(int doc)
+            {
+                Assert.AreEqual(ExpectedScore, this.scorer.GetScore(), 0, "Score differs from expected");
+                Count[0]++;
+            }
+
+            public virtual void SetNextReader(AtomicReaderContext context)
+            {
+            }
+
+            public virtual bool AcceptsDocsOutOfOrder
+            {
+                get { return true; }
+            }
+        }
+
+        [Test]
+        public virtual void TestWrapped2Times()
+        {
+            Directory directory = null;
+            IndexReader reader = null;
+            IndexSearcher searcher = null;
+            try
+            {
+                directory = NewDirectory();
+                RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, Similarity, TimeZone);
+
+                Document doc = new Document();
+                doc.Add(NewStringField("field", "term", Field.Store.NO));
+                writer.AddDocument(doc);
+
+                reader = writer.Reader;
+                writer.Dispose();
+                // we don't wrap with AssertingIndexSearcher in order to have the original scorer in setScorer.
+                searcher = NewSearcher(reader, true, false);
+
+                // set a similarity that does not normalize our boost away
+                searcher.Similarity = new DefaultSimilarityAnonymousInnerClassHelper(this);
+
+                Query csq1 = new ConstantScoreQuery(new TermQuery(new Term("field", "term")));
+                csq1.Boost = 2.0f;
+                Query csq2 = new ConstantScoreQuery(csq1);
+                csq2.Boost = 5.0f;
+
+                BooleanQuery bq = new BooleanQuery();
+                bq.Add(csq1, Occur.SHOULD);
+                bq.Add(csq2, Occur.SHOULD);
+
+                Query csqbq = new ConstantScoreQuery(bq);
+                csqbq.Boost = 17.0f;
+
+                CheckHits(searcher, csq1, csq1.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, null);
+                CheckHits(searcher, csq2, csq2.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, typeof(ConstantScoreQuery.ConstantScorer).Name);
+
+                // for the combined BQ, the scorer should always be BooleanScorer's BucketScorer, because our scorer supports out-of order collection!
+                string bucketScorerClass = typeof(FakeScorer).Name;
+                CheckHits(searcher, bq, csq1.Boost + csq2.Boost, bucketScorerClass, null);
+                CheckHits(searcher, csqbq, csqbq.Boost, typeof(ConstantScoreQuery.ConstantScorer).Name, bucketScorerClass);
+            }
+            finally
+            {
+                if (reader != null)
+                {
+                    reader.Dispose();
+                }
+                if (directory != null)
+                {
+                    directory.Dispose();
+                }
+            }
+        }
+
+        private class DefaultSimilarityAnonymousInnerClassHelper : DefaultSimilarity
+        {
+            private readonly TestConstantScoreQuery OuterInstance;
+
+            public DefaultSimilarityAnonymousInnerClassHelper(TestConstantScoreQuery outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            public override float QueryNorm(float sumOfSquaredWeights)
+            {
+                return 1.0f;
+            }
+        }
+
+        [Test]
+        public virtual void TestConstantScoreQueryAndFilter()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "a", Field.Store.NO));
+            w.AddDocument(doc);
+            doc = new Document();
+            doc.Add(NewStringField("field", "b", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            Filter filterB = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "b"))));
+            Query query = new ConstantScoreQuery(filterB);
+
+            IndexSearcher s = NewSearcher(r);
+            Assert.AreEqual(1, s.Search(query, filterB, 1).TotalHits); // Query for field:b, Filter field:b
+
+            Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a"))));
+            query = new ConstantScoreQuery(filterA);
+
+            Assert.AreEqual(0, s.Search(query, filterB, 1).TotalHits); // Query field:b, Filter field:a
+
+            r.Dispose();
+            d.Dispose();
+        }
+
+        // LUCENE-5307
+        // don't reuse the scorer of filters since they have been created with bulkScorer=false
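+        // (presumably the cached per-segment scorer cannot be handed back out as a bulk scorer, so ConstantScoreQuery has to pull a fresh iterator from the filter on each use)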
+        [Test]
+        public virtual void TestQueryWrapperFilter()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("field", "a", Field.Store.NO));
+            w.AddDocument(doc);
+            IndexReader r = w.Reader;
+            w.Dispose();
+
+            Filter filter = new QueryWrapperFilter(AssertingQuery.Wrap(Random(), new TermQuery(new Term("field", "a"))));
+            IndexSearcher s = NewSearcher(r);
+            Debug.Assert(s is AssertingIndexSearcher);
+            // this used to fail
+            s.Search(new ConstantScoreQuery(filter), new TotalHitCountCollector());
+
+            // check the rewrite
+            Query rewritten = (new ConstantScoreQuery(filter)).Rewrite(r);
+            Assert.IsTrue(rewritten is ConstantScoreQuery);
+            Assert.IsTrue(((ConstantScoreQuery)rewritten).Query is AssertingQuery);
+
+            r.Dispose();
+            d.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
new file mode 100644
index 0000000..d447b9e
--- /dev/null
+++ b/src/Lucene.Net.Tests/Search/TestControlledRealTimeReopenThread.cs
@@ -0,0 +1,731 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using Lucene.Net.Randomized.Generators;
+using Lucene.Net.Support;
+using NUnit.Framework;
+
+namespace Lucene.Net.Search
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using Document = Lucene.Net.Documents.Document;
+    using Field = Lucene.Net.Documents.Field;
+    using TextField = Lucene.Net.Documents.TextField;
+    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
+    using IndexCommit = Lucene.Net.Index.IndexCommit;
+    using IndexReader = Lucene.Net.Index.IndexReader;
+    using IndexWriter = Lucene.Net.Index.IndexWriter;
+    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
+    using IIndexableField = Lucene.Net.Index.IIndexableField;
+    using KeepOnlyLastCommitDeletionPolicy = Lucene.Net.Index.KeepOnlyLastCommitDeletionPolicy;
+    using NoMergePolicy = Lucene.Net.Index.NoMergePolicy;
+    using OpenMode = Lucene.Net.Index.OpenMode;
+    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
+    using SnapshotDeletionPolicy = Lucene.Net.Index.SnapshotDeletionPolicy;
+    using Term = Lucene.Net.Index.Term;
+    using ThreadedIndexingAndSearchingTestCase = Lucene.Net.Index.ThreadedIndexingAndSearchingTestCase;
+    using TrackingIndexWriter = Lucene.Net.Index.TrackingIndexWriter;
+    using Directory = Lucene.Net.Store.Directory;
+    using NRTCachingDirectory = Lucene.Net.Store.NRTCachingDirectory;
+    using IOUtils = Lucene.Net.Util.IOUtils;
+    using SuppressCodecs = Lucene.Net.Util.LuceneTestCase.SuppressCodecs;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    //using ThreadInterruptedException = Lucene.Net.Util.ThreadInterruptedException;
+    using Version = Lucene.Net.Util.LuceneVersion;
+
+    [SuppressCodecs("SimpleText", "Memory", "Direct")]
+    [TestFixture]
+    public class TestControlledRealTimeReopenThread : ThreadedIndexingAndSearchingTestCase
+    {
+
+        // Not guaranteed to reflect deletes:
+        private SearcherManager NrtNoDeletes;
+
+        // Is guaranteed to reflect deletes:
+        private SearcherManager NrtDeletes;
+
+        private TrackingIndexWriter GenWriter;
+
+        private ControlledRealTimeReopenThread<IndexSearcher> NrtDeletesThread;
+        private ControlledRealTimeReopenThread<IndexSearcher> NrtNoDeletesThread;
+
+        private readonly ThreadLocal<long?> LastGens = new ThreadLocal<long?>();
+        private bool WarmCalled;
+
+        [Test]
+        public virtual void TestControlledRealTimeReopenThread_Mem()
+        {
+            RunTest("TestControlledRealTimeReopenThread");
+        }
+
+        protected internal override IndexSearcher FinalSearcher
+        {
+            get
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: finalSearcher maxGen=" + MaxGen);
+                }
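+                // block until the deletes-applying reopen thread has refreshed past the highest generation recorded by any indexing thread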
+                NrtDeletesThread.WaitForGeneration(MaxGen);
+                return NrtDeletes.Acquire();
+            }
+        }
+
+        protected internal override Directory GetDirectory(Directory @in)
+        {
+            // Randomly swap in NRTCachingDir
+            if (Random().NextBoolean())
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine("TEST: wrap NRTCachingDir");
+                }
+
+                return new NRTCachingDirectory(@in, 5.0, 60.0);
+            }
+            else
+            {
+                return @in;
+            }
+        }
+
+        protected internal override void UpdateDocuments(Term id, IEnumerable<IEnumerable<IIndexableField>> docs)
+        {
+            long gen = GenWriter.UpdateDocuments(id, docs);
+
+            // Randomly verify the update "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(docs.Count(), s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtDeletes.Release(s);
+                }
+            }
+
+            LastGens.Value = gen;
+
+        }
+
+        protected internal override void AddDocuments(Term id, IEnumerable<IEnumerable<IIndexableField>> docs)
+        {
+            long gen = GenWriter.AddDocuments(docs);
+            // Randomly verify the add "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtNoDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtNoDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(docs.Count(), s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtNoDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void AddDocument(Term id, IEnumerable<IIndexableField> doc)
+        {
+            long gen = GenWriter.AddDocument(doc);
+
+            // Randomly verify the add "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtNoDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtNoDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(1, s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtNoDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void UpdateDocument(Term id, IEnumerable<IIndexableField> doc)
+        {
+            long gen = GenWriter.UpdateDocument(id, doc);
+            // Randomly verify the update "took":
+            if (Random().Next(20) == 2)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify " + id);
+                }
+                NrtDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(1, s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void DeleteDocuments(Term id)
+        {
+            long gen = GenWriter.DeleteDocuments(id);
+            // randomly verify the delete "took":
+            if (Random().Next(20) == 7)
+            {
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: verify del " + id);
+                }
+                NrtDeletesThread.WaitForGeneration(gen);
+                IndexSearcher s = NrtDeletes.Acquire();
+                if (VERBOSE)
+                {
+                    Console.WriteLine(Thread.CurrentThread.Name + ": nrt: got searcher=" + s);
+                }
+                try
+                {
+                    Assert.AreEqual(0, s.Search(new TermQuery(id), 10).TotalHits);
+                }
+                finally
+                {
+                    NrtDeletes.Release(s);
+                }
+            }
+            LastGens.Value = gen;
+        }
+
+        protected internal override void DoAfterWriter(TaskScheduler es)
+        {
+            double minReopenSec = 0.01 + 0.05 * Random().NextDouble();
+            double maxReopenSec = minReopenSec * (1.0 + 10 * Random().NextDouble());
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: make SearcherManager maxReopenSec=" + maxReopenSec + " minReopenSec=" + minReopenSec);
+            }
+
+            GenWriter = new TrackingIndexWriter(Writer);
+
+            SearcherFactory sf = new SearcherFactoryAnonymousInnerClassHelper(this, es);
+
+            NrtNoDeletes = new SearcherManager(Writer, false, sf);
+            NrtDeletes = new SearcherManager(Writer, true, sf);
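+            // the bool argument is applyAllDeletes: the first manager may hand out readers that do not yet reflect deletes, the second always applies them on reopen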
+
+            NrtDeletesThread = new ControlledRealTimeReopenThread<IndexSearcher>(GenWriter, NrtDeletes, maxReopenSec, minReopenSec);
+            NrtDeletesThread.Name = "NRTDeletes Reopen Thread";
+            NrtDeletesThread.Priority = (ThreadPriority)Math.Min((int)Thread.CurrentThread.Priority + 2, (int)ThreadPriority.Highest);
+            NrtDeletesThread.SetDaemon(true);
+            NrtDeletesThread.Start();
+
+            NrtNoDeletesThread = new ControlledRealTimeReopenThread<IndexSearcher>(GenWriter, NrtNoDeletes, maxReopenSec, minReopenSec);
+            NrtNoDeletesThread.Name = "NRTNoDeletes Reopen Thread";
+            NrtNoDeletesThread.Priority = (ThreadPriority)Math.Min((int)Thread.CurrentThread.Priority + 2, (int)ThreadPriority.Highest);
+            NrtNoDeletesThread.SetDaemon(true);
+            NrtNoDeletesThread.Start();
+        }
+
+        private class SearcherFactoryAnonymousInnerClassHelper : SearcherFactory
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private TaskScheduler Es;
+
+            public SearcherFactoryAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, TaskScheduler es)
+            {
+                this.OuterInstance = outerInstance;
+                this.Es = es;
+            }
+
+            public override IndexSearcher NewSearcher(IndexReader r)
+            {
+                OuterInstance.WarmCalled = true;
+                IndexSearcher s = new IndexSearcher(r, Es);
+                s.Search(new TermQuery(new Term("body", "united")), 10);
+                return s;
+            }
+        }
+
+        protected internal override void DoAfterIndexingThreadDone()
+        {
+            long? gen = LastGens.Value;
+            if (gen != null)
+            {
+                AddMaxGen((long)gen);
+            }
+        }
+
+        private long MaxGen = -1;
+
+        private void AddMaxGen(long gen)
+        {
+            lock (this)
+            {
+                MaxGen = Math.Max(gen, MaxGen);
+            }
+        }
+
+        protected internal override void DoSearching(TaskScheduler es, DateTime stopTime)
+        {
+            RunSearchThreads(stopTime);
+        }
+
+        protected internal override IndexSearcher CurrentSearcher
+        {
+            get
+            {
+                // Test doesn't assert deletions until the end, so we
+                // can randomize whether dels must be applied
+                SearcherManager nrt;
+                if (Random().NextBoolean())
+                {
+                    nrt = NrtDeletes;
+                }
+                else
+                {
+                    nrt = NrtNoDeletes;
+                }
+
+                return nrt.Acquire();
+            }
+        }
+
+        protected internal override void ReleaseSearcher(IndexSearcher s)
+        {
+            // NOTE: a bit iffy... technically you should release
+            // against the same SearcherManager you acquired from... but
+            // both impls just decRef the underlying reader so we
+            // can get away w/ cheating:
+            NrtNoDeletes.Release(s);
+        }
+
+        protected internal override void DoClose()
+        {
+            Assert.IsTrue(WarmCalled);
+            if (VERBOSE)
+            {
+                Console.WriteLine("TEST: now close SearcherManagers");
+            }
+            NrtDeletesThread.Dispose();
+            NrtDeletes.Dispose();
+            NrtNoDeletesThread.Dispose();
+            NrtNoDeletes.Dispose();
+        }
+
+        /*
+         * LUCENE-3528 - NRTManager hangs in certain situations 
+         */
+        [Test]
+        public virtual void TestThreadStarvationNoDeleteNRTReader()
+        {
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            conf.SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
+            Directory d = NewDirectory();
+            CountdownEvent latch = new CountdownEvent(1);
+            CountdownEvent signal = new CountdownEvent(1);
+
+            LatchedIndexWriter _writer = new LatchedIndexWriter(d, conf, latch, signal);
+            TrackingIndexWriter writer = new TrackingIndexWriter(_writer);
+            SearcherManager manager = new SearcherManager(_writer, false, null);
+            Document doc = new Document();
+            doc.Add(NewTextField("test", "test", Field.Store.YES));
+            writer.AddDocument(doc);
+            manager.MaybeRefresh();
+            ThreadClass t = new ThreadAnonymousInnerClassHelper(this, latch, signal, writer, manager);
+            t.Start();
+            _writer.WaitAfterUpdate = true; // wait in addDocument to let some reopens go through
+            long lastGen = writer.UpdateDocument(new Term("foo", "bar"), doc); // once this returns the doc is already reflected in the last reopen
+
+            Assert.IsFalse(manager.IsSearcherCurrent()); // false since there is a delete in the queue
+
+            IndexSearcher searcher = manager.Acquire();
+            try
+            {
+                Assert.AreEqual(2, searcher.IndexReader.NumDocs);
+            }
+            finally
+            {
+                manager.Release(searcher);
+            }
+            ControlledRealTimeReopenThread<IndexSearcher> thread = new ControlledRealTimeReopenThread<IndexSearcher>(writer, manager, 0.01, 0.01);
+            thread.Start(); // start reopening
+            if (VERBOSE)
+            {
+                Console.WriteLine("waiting now for generation " + lastGen);
+            }
+
+            AtomicBoolean finished = new AtomicBoolean(false);
+            ThreadClass waiter = new ThreadAnonymousInnerClassHelper2(this, lastGen, thread, finished);
+            waiter.Start();
+            manager.MaybeRefresh();
+            waiter.Join(1000);
+            if (!finished.Get())
+            {
+                waiter.Interrupt();
+                Assert.Fail("thread deadlocked on waitForGeneration");
+            }
+            thread.Dispose();
+            thread.Join();
+            IOUtils.Close(manager, _writer, d);
+        }
+
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private CountdownEvent Latch;
+            private CountdownEvent Signal;
+            private TrackingIndexWriter Writer;
+            private SearcherManager Manager;
+
+            public ThreadAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, CountdownEvent latch, CountdownEvent signal, TrackingIndexWriter writer, SearcherManager manager)
+            {
+                this.OuterInstance = outerInstance;
+                this.Latch = latch;
+                this.Signal = signal;
+                this.Writer = writer;
+                this.Manager = manager;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Signal.Wait();
+                    Manager.MaybeRefresh();
+                    Writer.DeleteDocuments(new TermQuery(new Term("foo", "barista")));
+                    Manager.MaybeRefresh(); // kick off another reopen so we inc. the internal gen
+                }
+                catch (Exception e)
+                {
+                    Console.WriteLine(e.ToString());
+                    Console.Write(e.StackTrace);
+                }
+                finally
+                {
+                    Latch.Reset(Latch.CurrentCount == 0 ? 0 : Latch.CurrentCount - 1); // let the add below finish
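+                    // (Reset to CurrentCount - 1 stands in for Java's CountDownLatch.countDown(): once the count reaches 0 the CountdownEvent is signaled and the update blocked inside LatchedIndexWriter resumes)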
+                }
+            }
+        }
+
+        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private long LastGen;
+            private ControlledRealTimeReopenThread<IndexSearcher> thread;
+            private AtomicBoolean Finished;
+
+            public ThreadAnonymousInnerClassHelper2(TestControlledRealTimeReopenThread outerInstance, long lastGen, ControlledRealTimeReopenThread<IndexSearcher> thread, AtomicBoolean finished)
+            {
+                this.OuterInstance = outerInstance;
+                this.LastGen = lastGen;
+                this.thread = thread;
+                this.Finished = finished;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    thread.WaitForGeneration(LastGen);
+                }
+                catch (ThreadInterruptedException ie)
+                {
+                    Thread.CurrentThread.Interrupt();
+                    throw new Exception(ie.Message, ie);
+                }
+                Finished.Set(true);
+            }
+        }
+
+        public class LatchedIndexWriter : IndexWriter
+        {
+
+            internal CountdownEvent Latch;
+            internal bool WaitAfterUpdate = false;
+            internal CountdownEvent Signal;
+
+            public LatchedIndexWriter(Directory d, IndexWriterConfig conf, CountdownEvent latch, CountdownEvent signal)
+                : base(d, conf)
+            {
+                this.Latch = latch;
+                this.Signal = signal;
+
+            }
+
+            public override void UpdateDocument(Term term, IEnumerable<IIndexableField> doc, Analyzer analyzer)
+            {
+                base.UpdateDocument(term, doc, analyzer);
+                try
+                {
+                    if (WaitAfterUpdate)
+                    {
+                        Signal.Reset(Signal.CurrentCount == 0 ? 0 : Signal.CurrentCount - 1);
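+                        // count down Signal to wake the helper thread waiting in Signal.Wait(), then block here on Latch until that thread lets the update finish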
+                        Latch.Wait();
+                    }
+                }
+#pragma warning disable 168
+                catch (ThreadInterruptedException e)
+#pragma warning restore 168
+                {
+                    throw;
+                }
+            }
+        }
+
+        [Test]
+        public virtual void TestEvilSearcherFactory()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            w.Commit();
+
+            IndexReader other = DirectoryReader.Open(dir);
+
+            SearcherFactory theEvilOne = new SearcherFactoryAnonymousInnerClassHelper2(this, other);
+
+            try
+            {
+                new SearcherManager(w.w, false, theEvilOne);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (InvalidOperationException ise)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            other.Dispose();
+            dir.Dispose();
+        }
+
+        private class SearcherFactoryAnonymousInnerClassHelper2 : SearcherFactory
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private IndexReader Other;
+
+            public SearcherFactoryAnonymousInnerClassHelper2(TestControlledRealTimeReopenThread outerInstance, IndexReader other)
+            {
+                this.OuterInstance = outerInstance;
+                this.Other = other;
+            }
+
+            public override IndexSearcher NewSearcher(IndexReader ignored)
+            {
+                return OuterInstance.NewSearcher(Other);
+            }
+        }
+
+        [Test]
+        public virtual void TestListenerCalled()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
+            AtomicBoolean afterRefreshCalled = new AtomicBoolean(false);
+            SearcherManager sm = new SearcherManager(iw, true, new SearcherFactory());
+            sm.AddListener(new RefreshListenerAnonymousInnerClassHelper(this, afterRefreshCalled));
+            iw.AddDocument(new Document());
+            iw.Commit();
+            Assert.IsFalse(afterRefreshCalled.Get());
+            sm.MaybeRefreshBlocking();
+            Assert.IsTrue(afterRefreshCalled.Get());
+            sm.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private class RefreshListenerAnonymousInnerClassHelper : ReferenceManager.IRefreshListener
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private AtomicBoolean AfterRefreshCalled;
+
+            public RefreshListenerAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, AtomicBoolean afterRefreshCalled)
+            {
+                this.OuterInstance = outerInstance;
+                this.AfterRefreshCalled = afterRefreshCalled;
+            }
+
+            public void BeforeRefresh()
+            {
+            }
+            public void AfterRefresh(bool didRefresh)
+            {
+                if (didRefresh)
+                {
+                    AfterRefreshCalled.Set(true);
+                }
+            }
+        }
+
+        // LUCENE-5461
+        [Test, Timeout(120000)]
+        public virtual void TestCRTReopen()
+        {
+            //test behaving badly
+
+            //should be high enough
+            int maxStaleSecs = 20;
+
+            //build crap data just to store it.
+            string s = "        abcdefghijklmnopqrstuvwxyz     ";
+            char[] chars = s.ToCharArray();
+            StringBuilder builder = new StringBuilder(2048);
+            for (int i = 0; i < 2048; i++)
+            {
+                builder.Append(chars[Random().Next(chars.Length)]);
+            }
+            string content = builder.ToString();
+
+            SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
+            Directory dir = new NRTCachingDirectory(NewFSDirectory(CreateTempDir("nrt")), 5, 128);
+            IndexWriterConfig config = new IndexWriterConfig(
+#pragma warning disable 612, 618
+                Version.LUCENE_46,
+#pragma warning restore 612, 618
+                new MockAnalyzer(Random()));
+            config.SetIndexDeletionPolicy(sdp);
+            config.SetOpenMode(OpenMode.CREATE_OR_APPEND);
+            IndexWriter iw = new IndexWriter(dir, config);
+            SearcherManager sm = new SearcherManager(iw, true, new SearcherFactory());
+            TrackingIndexWriter tiw = new TrackingIndexWriter(iw);
+            ControlledRealTimeReopenThread<IndexSearcher> controlledRealTimeReopenThread = new ControlledRealTimeReopenThread<IndexSearcher>(tiw, sm, maxStaleSecs, 0);
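+            // targetMaxStaleSec = maxStaleSecs, targetMinStaleSec = 0: reopen immediately whenever a caller is waiting in WaitForGeneration, but never let the searcher go more than ~20 seconds stale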
+
+            controlledRealTimeReopenThread.SetDaemon(true);
+            controlledRealTimeReopenThread.Start();
+
+            IList<ThreadClass> commitThreads = new List<ThreadClass>();
+
+            for (int i = 0; i < 500; i++)
+            {
+                if (i > 0 && i % 50 == 0)
+                {
+                    ThreadClass commitThread = new RunnableAnonymousInnerClassHelper(this, sdp, dir, iw);
+                    commitThread.Start();
+                    commitThreads.Add(commitThread);
+                }
+                Document d = new Document();
+                d.Add(new TextField("count", i + "", Field.Store.NO));
+                d.Add(new TextField("content", content, Field.Store.YES));
+                // DateTime.Now.Millisecond only yields the 0-999 millisecond component of the clock; measure the elapsed time instead
+                DateTime start = DateTime.UtcNow;
+                long l = tiw.AddDocument(d);
+                controlledRealTimeReopenThread.WaitForGeneration(l);
+                double wait = (DateTime.UtcNow - start).TotalMilliseconds;
+                Assert.IsTrue(wait < (maxStaleSecs * 1000), "waited too long for generation " + wait);
+                IndexSearcher searcher = sm.Acquire();
+                TopDocs td = searcher.Search(new TermQuery(new Term("count", i + "")), 10);
+                sm.Release(searcher);
+                Assert.AreEqual(1, td.TotalHits);
+            }
+
+            foreach (ThreadClass commitThread in commitThreads)
+            {
+                commitThread.Join();
+            }
+
+            controlledRealTimeReopenThread.Dispose();
+            sm.Dispose();
+            iw.Dispose();
+            dir.Dispose();
+        }
+
+        private class RunnableAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestControlledRealTimeReopenThread OuterInstance;
+
+            private SnapshotDeletionPolicy Sdp;
+            private Directory Dir;
+            private IndexWriter Iw;
+
+            public RunnableAnonymousInnerClassHelper(TestControlledRealTimeReopenThread outerInstance, SnapshotDeletionPolicy sdp, Directory dir, IndexWriter iw)
+            {
+                this.OuterInstance = outerInstance;
+                this.Sdp = sdp;
+                this.Dir = dir;
+                this.Iw = iw;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    Iw.Commit();
+                    IndexCommit ic = Sdp.Snapshot();
+                    foreach (string name in ic.FileNames)
+                    {
+                        //distribute, and backup
+                        //System.out.println(names);
+                        Assert.IsTrue(SlowFileExists(Dir, name));
+                    }
+                }
+                catch (Exception e)
+                {
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

