lucenenet-commits mailing list archives

From: synhers...@apache.org
Subject: [39/50] [abbrv] lucenenet git commit: Ported Lucene.Net.Memory + tests
Date: Sun, 02 Oct 2016 14:35:57 GMT
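This commit ports Lucene.Net.Memory (the single-document, in-RAM MemoryIndex) together with its tests. For orientation, a minimal sketch of the API the ported tests below exercise — the class name MemoryIndexExample, the Demo method, and the "content" field are illustrative only, not part of the commit:

    using System;
    using Lucene.Net.Analysis;
    using Lucene.Net.Index;
    using Lucene.Net.Index.Memory;
    using Lucene.Net.Search;

    public static class MemoryIndexExample
    {
        // Index a single document in memory, then score and search a query against it.
        public static void Demo(Analyzer analyzer)
        {
            MemoryIndex index = new MemoryIndex();
            index.AddField("content", "the quick brown fox", analyzer);            // analyze and index one field
            float score = index.Search(new TermQuery(new Term("content", "fox"))); // 0.0f when nothing matches
            IndexSearcher searcher = index.CreateSearcher();                       // standard searcher over the in-memory document
            TopDocs hits = searcher.Search(new TermQuery(new Term("content", "fox")), 10);
            Console.WriteLine("score={0}, hits={1}", score, hits.TotalHits);
            // The tests below make assertions of exactly this shape: score > 0, hits.TotalHits == 1.
        }
    }
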
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d47b6088/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs b/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs
new file mode 100644
index 0000000..8f4daa3
--- /dev/null
+++ b/src/Lucene.Net.Tests.Memory/Index/Memory/MemoryIndexTest.cs
@@ -0,0 +1,584 @@
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.Tokenattributes;
+using Lucene.Net.Codecs.Lucene41;
+using Lucene.Net.Documents;
+using Lucene.Net.Index;
+using Lucene.Net.Index.Memory;
+using Lucene.Net.QueryParsers.Classic;
+using Lucene.Net.Search;
+using Lucene.Net.Search.Spans;
+using Lucene.Net.Store;
+using Lucene.Net.Util;
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+
+namespace Lucene.Net.Index.Memory
+{
+    public class MemoryIndexTest : BaseTokenStreamTestCase
+    {
+        private ISet<string> queries = new HashSet<string>();
+
+        public static readonly int ITERATIONS = 100 * RANDOM_MULTIPLIER;
+
+
+        public override void SetUp()
+        {
+            base.SetUp();
+            queries.addAll(ReadQueries("testqueries.txt"));
+            queries.addAll(ReadQueries("testqueries2.txt"));
+        }
+
+        /**
+         * read a set of queries from a resource file
+         */
+        private ISet<string> ReadQueries(string resource)
+        {
+            ISet<string> queries = new HashSet<string>();
+            Stream stream = GetType().getResourceAsStream(resource);
+            TextReader reader = new StreamReader(stream, Encoding.UTF8);
+            String line = null;
+            while ((line = reader.ReadLine()) != null)
+            {
+                line = line.Trim();
+                if (line.Length > 0 && !line.StartsWith("#", StringComparison.Ordinal) && !line.StartsWith("//", StringComparison.Ordinal))
+                {
+                    queries.add(line);
+                }
+            }
+            return queries;
+        }
+
+
+        /**
+         * runs random tests, up to ITERATIONS times.
+         */
+         [Test]
+        public void TestRandomQueries()
+        {
+            MemoryIndex index = new MemoryIndex(Random().nextBoolean(), Random().nextInt(50) * 1024 * 1024);
+            for (int i = 0; i < ITERATIONS; i++)
+            {
+                AssertAgainstRAMDirectory(index);
+            }
+        }
+
+        /**
+         * Build a randomish document for both RAMDirectory and MemoryIndex,
+         * and run all the queries against it.
+         */
+        public void AssertAgainstRAMDirectory(MemoryIndex memory)
+        {
+            memory.Reset();
+            StringBuilder fooField = new StringBuilder();
+            StringBuilder termField = new StringBuilder();
+
+            // add up to 250 terms to field "foo"
+            int numFooTerms = Random().nextInt(250 * RANDOM_MULTIPLIER);
+            for (int i = 0; i < numFooTerms; i++)
+            {
+                fooField.append(" ");
+                fooField.append(RandomTerm());
+            }
+
+            // add up to 250 terms to field "term"
+            int numTermTerms = Random().nextInt(250 * RANDOM_MULTIPLIER);
+            for (int i = 0; i < numTermTerms; i++)
+            {
+                termField.append(" ");
+                termField.append(RandomTerm());
+            }
+
+            Store.Directory ramdir = new RAMDirectory();
+            Analyzer analyzer = RandomAnalyzer();
+            IndexWriter writer = new IndexWriter(ramdir,
+                                                 new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat())));
+            Document doc = new Document();
+            Field field1 = NewTextField("foo", fooField.toString(), Field.Store.NO);
+            Field field2 = NewTextField("term", termField.toString(), Field.Store.NO);
+            doc.Add(field1);
+            doc.Add(field2);
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            memory.AddField("foo", fooField.toString(), analyzer);
+            memory.AddField("term", termField.toString(), analyzer);
+
+            if (VERBOSE)
+            {
+                Console.WriteLine("Random MemoryIndex:\n" + memory.toString());
+                Console.WriteLine("Same index as RAMDirectory: " +
+                  RamUsageEstimator.HumanReadableUnits(RamUsageEstimator.SizeOf(ramdir)));
+                Console.WriteLine();
+            }
+            else
+            {
+                assertTrue(memory.MemorySize > 0L);
+            }
+            AtomicReader reader = (AtomicReader)memory.CreateSearcher().IndexReader;
+            DirectoryReader competitor = DirectoryReader.Open(ramdir);
+            DuellReaders(competitor, reader);
+            IOUtils.Close(reader, competitor);
+            AssertAllQueries(memory, ramdir, analyzer);
+            ramdir.Dispose();
+        }
+
+        private void DuellReaders(CompositeReader other, AtomicReader memIndexReader)
+        {
+            AtomicReader competitor = SlowCompositeReaderWrapper.Wrap(other);
+            Fields memFields = memIndexReader.Fields;
+            foreach (string field in competitor.Fields)
+            {
+                Terms memTerms = memFields.Terms(field);
+                Terms iwTerms = memIndexReader.Terms(field);
+                if (iwTerms == null)
+                {
+                    assertNull(memTerms);
+                }
+                else
+                {
+                    NumericDocValues normValues = competitor.GetNormValues(field);
+                    NumericDocValues memNormValues = memIndexReader.GetNormValues(field);
+                    if (normValues != null)
+                    {
+                        // mem idx always computes norms on the fly
+                        assertNotNull(memNormValues);
+                        assertEquals(normValues.Get(0), memNormValues.Get(0));
+                    }
+
+                    assertNotNull(memTerms);
+                    assertEquals(iwTerms.DocCount, memTerms.DocCount);
+                    assertEquals(iwTerms.SumDocFreq, memTerms.SumDocFreq);
+                    assertEquals(iwTerms.SumTotalTermFreq, memTerms.SumTotalTermFreq);
+                    TermsEnum iwTermsIter = iwTerms.Iterator(null);
+                    TermsEnum memTermsIter = memTerms.Iterator(null);
+                    if (iwTerms.HasPositions())
+                    {
+                        bool offsets = iwTerms.HasOffsets() && memTerms.HasOffsets();
+
+                        while (iwTermsIter.Next() != null)
+                        {
+                            assertNotNull(memTermsIter.Next());
+                            assertEquals(iwTermsIter.Term(), memTermsIter.Term());
+                            DocsAndPositionsEnum iwDocsAndPos = iwTermsIter.DocsAndPositions(null, null);
+                            DocsAndPositionsEnum memDocsAndPos = memTermsIter.DocsAndPositions(null, null);
+                            while (iwDocsAndPos.NextDoc() != DocsAndPositionsEnum.NO_MORE_DOCS)
+                            {
+                                assertEquals(iwDocsAndPos.DocID(), memDocsAndPos.NextDoc());
+                                assertEquals(iwDocsAndPos.Freq(), memDocsAndPos.Freq());
+                                for (int i = 0; i < iwDocsAndPos.Freq(); i++)
+                                {
+                                    assertEquals("term: " + iwTermsIter.Term().Utf8ToString(), iwDocsAndPos.NextPosition(), memDocsAndPos.NextPosition());
+                                    if (offsets)
+                                    {
+                                        assertEquals(iwDocsAndPos.StartOffset(), memDocsAndPos.StartOffset());
+                                        assertEquals(iwDocsAndPos.EndOffset(), memDocsAndPos.EndOffset());
+                                    }
+                                }
+
+                            }
+
+                        }
+                    }
+                    else
+                    {
+                        while (iwTermsIter.Next() != null)
+                        {
+                            assertEquals(iwTermsIter.Term(), memTermsIter.Term());
+                            DocsEnum iwDocsAndPos = iwTermsIter.Docs(null, null);
+                            DocsEnum memDocsAndPos = memTermsIter.Docs(null, null);
+                            while (iwDocsAndPos.NextDoc() != DocsAndPositionsEnum.NO_MORE_DOCS)
+                            {
+                                assertEquals(iwDocsAndPos.DocID(), memDocsAndPos.NextDoc());
+                                assertEquals(iwDocsAndPos.Freq(), memDocsAndPos.Freq());
+                            }
+                        }
+                    }
+                }
+
+            }
+        }
+
+        /**
+         * Run all queries against both the RAMDirectory and MemoryIndex, ensuring they are the same.
+         */
+        public void AssertAllQueries(MemoryIndex memory, Store.Directory ramdir, Analyzer analyzer)
+        {
+            IndexReader reader = DirectoryReader.Open(ramdir);
+            IndexSearcher ram = NewSearcher(reader);
+            IndexSearcher mem = memory.CreateSearcher();
+            QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "foo", analyzer);
+            foreach (string query in queries)
+            {
+                TopDocs ramDocs = ram.Search(qp.Parse(query), 1);
+                TopDocs memDocs = mem.Search(qp.Parse(query), 1);
+                assertEquals(query, ramDocs.TotalHits, memDocs.TotalHits);
+            }
+            reader.Dispose();
+        }
+
+        internal class RandomAnalyzerHelper : Analyzer
+        {
+            private readonly MemoryIndexTest outerInstance;
+            public RandomAnalyzerHelper(MemoryIndexTest outerInstance)
+            {
+                this.outerInstance = outerInstance;
+            }
+
+            public override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader);
+                return new TokenStreamComponents(tokenizer, new CrazyTokenFilter(tokenizer));
+            }
+        }
+
+        /**
+         * Return a random analyzer (Simple, Stop, Standard) to analyze the terms.
+         */
+        private Analyzer RandomAnalyzer()
+        {
+            switch (Random().nextInt(4))
+            {
+                case 0: return new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true);
+                case 1: return new MockAnalyzer(Random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET);
+                case 2: return new RandomAnalyzerHelper(this);
+                //            return new Analyzer() {
+
+                //        protected TokenStreamComponents createComponents(string fieldName, TextReader reader)
+                //{
+                //    Tokenizer tokenizer = new MockTokenizer(reader);
+                //    return new TokenStreamComponents(tokenizer, new CrazyTokenFilter(tokenizer));
+                //}
+                //      };
+                default: return new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false);
+            }
+        }
+
+
+
+        // a tokenfilter that makes all terms starting with 't' empty strings
+        internal sealed class CrazyTokenFilter : TokenFilter
+        {
+            private readonly ICharTermAttribute termAtt;
+
+
+            public CrazyTokenFilter(TokenStream input)
+                : base(input)
+            {
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            public override bool IncrementToken()
+            {
+                if (input.IncrementToken())
+                {
+                    if (termAtt.Length > 0 && termAtt.Buffer()[0] == 't')
+                    {
+                        termAtt.SetLength(0);
+                    }
+                    return true;
+                }
+                else
+                {
+                    return false;
+                }
+            }
+        };
+
+        /**
+         * Some terms to be indexed, in addition to random words. 
+         * These terms are commonly used in the queries. 
+         */
+        private static readonly string[] TEST_TERMS = {"term", "Term", "tErm", "TERM",
+            "telm", "stop", "drop", "roll", "phrase", "a", "c", "bar", "blar",
+            "gack", "weltbank", "worlbank", "hello", "on", "the", "apache", "Apache",
+            "copyright", "Copyright"};
+
+
+        /**
+         * half of the time, returns a random term from TEST_TERMS.
+         * the other half of the time, returns a random unicode string.
+         */
+        private string RandomTerm()
+        {
+            if (Random().nextBoolean())
+            {
+                // return a random TEST_TERM
+                return TEST_TERMS[Random().nextInt(TEST_TERMS.Length)];
+            }
+            else
+            {
+                // return a random unicode term
+                return TestUtil.RandomUnicodeString(Random());
+            }
+        }
+
+        [Test]
+        public void TestDocsEnumStart()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            MemoryIndex memory = new MemoryIndex(Random().nextBoolean(), Random().nextInt(50) * 1024 * 1024);
+            memory.AddField("foo", "bar", analyzer);
+            AtomicReader reader = (AtomicReader)memory.CreateSearcher().IndexReader;
+            DocsEnum disi = TestUtil.Docs(Random(), reader, "foo", new BytesRef("bar"), null, null, DocsEnum.FLAG_NONE);
+            int docid = disi.DocID();
+            assertEquals(-1, docid);
+            assertTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            // now reuse and check again
+            TermsEnum te = reader.Terms("foo").Iterator(null);
+            assertTrue(te.SeekExact(new BytesRef("bar")));
+            disi = te.Docs(null, disi, DocsEnum.FLAG_NONE);
+            docid = disi.DocID();
+            assertEquals(-1, docid);
+            assertTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            reader.Dispose();
+        }
+
+        private ByteBlockPool.Allocator RandomByteBlockAllocator()
+        {
+            if (Random().nextBoolean())
+            {
+                return new RecyclingByteBlockAllocator();
+            }
+            else
+            {
+                return new ByteBlockPool.DirectAllocator();
+            }
+        }
+
+        [Test]
+        public void TestDocsAndPositionsEnumStart()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            int numIters = AtLeast(3);
+            MemoryIndex memory = new MemoryIndex(true, Random().nextInt(50) * 1024 * 1024);
+            for (int i = 0; i < numIters; i++)
+            { // check reuse
+                memory.AddField("foo", "bar", analyzer);
+                AtomicReader reader = (AtomicReader)memory.CreateSearcher().IndexReader;
+                assertEquals(1, reader.Terms("foo").SumTotalTermFreq);
+                DocsAndPositionsEnum disi = reader.TermPositionsEnum(new Term("foo", "bar"));
+                int docid = disi.DocID();
+                assertEquals(-1, docid);
+                assertTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                assertEquals(0, disi.NextPosition());
+                assertEquals(0, disi.StartOffset());
+                assertEquals(3, disi.EndOffset());
+
+                // now reuse and check again
+                TermsEnum te = reader.Terms("foo").Iterator(null);
+                assertTrue(te.SeekExact(new BytesRef("bar")));
+                disi = te.DocsAndPositions(null, disi);
+                docid = disi.DocID();
+                assertEquals(-1, docid);
+                assertTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                reader.Dispose();
+                memory.Reset();
+            }
+        }
+
+        // LUCENE-3831
+        [Test]
+        public void TestNullPointerException()
+        {
+            RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
+            SpanQuery wrappedquery = new SpanMultiTermQueryWrapper<RegexpQuery>(regex);
+
+            MemoryIndex mindex = new MemoryIndex(Random().nextBoolean(), Random().nextInt(50) * 1024 * 1024);
+            mindex.AddField("field", new MockAnalyzer(Random()).TokenStream("field", "hello there"));
+
+            // This throws an NPE
+            assertEquals(0, mindex.Search(wrappedquery), 0.00001f);
+        }
+
+        // LUCENE-3831
+        [Test]
+        public void TestPassesIfWrapped()
+        {
+            RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
+            SpanQuery wrappedquery = new SpanOrQuery(new SpanMultiTermQueryWrapper<RegexpQuery>(regex));
+
+            MemoryIndex mindex = new MemoryIndex(Random().nextBoolean(), Random().nextInt(50) * 1024 * 1024);
+            mindex.AddField("field", new MockAnalyzer(Random()).TokenStream("field", "hello there"));
+
+            // This passes though
+            assertEquals(0, mindex.Search(wrappedquery), 0.00001f);
+        }
+
+        [Test]
+        public void TestSameFieldAddedMultipleTimes()
+        {
+            MemoryIndex mindex = new MemoryIndex(Random().nextBoolean(), Random().nextInt(50) * 1024 * 1024);
+            MockAnalyzer mockAnalyzer = new MockAnalyzer(Random());
+            mindex.AddField("field", "the quick brown fox", mockAnalyzer);
+            mindex.AddField("field", "jumps over the", mockAnalyzer);
+            AtomicReader reader = (AtomicReader)mindex.CreateSearcher().IndexReader;
+            assertEquals(7, reader.Terms("field").SumTotalTermFreq);
+            PhraseQuery query = new PhraseQuery();
+            query.Add(new Term("field", "fox"));
+            query.Add(new Term("field", "jumps"));
+            assertTrue(mindex.Search(query) > 0.1);
+            mindex.Reset();
+            mockAnalyzer.PositionIncrementGap = (1 + Random().nextInt(10));
+            mindex.AddField("field", "the quick brown fox", mockAnalyzer);
+            mindex.AddField("field", "jumps over the", mockAnalyzer);
+            assertEquals(0, mindex.Search(query), 0.00001f);
+            query.Slop = (10);
+            assertTrue("posGap" + mockAnalyzer.GetPositionIncrementGap("field"), mindex.Search(query) > 0.0001);
+        }
+
+        [Test]
+        public void TestNonExistingsField()
+        {
+            MemoryIndex mindex = new MemoryIndex(Random().nextBoolean(), Random().nextInt(50) * 1024 * 1024);
+            MockAnalyzer mockAnalyzer = new MockAnalyzer(Random());
+            mindex.AddField("field", "the quick brown fox", mockAnalyzer);
+            AtomicReader reader = (AtomicReader)mindex.CreateSearcher().IndexReader;
+            assertNull(reader.GetNumericDocValues("not-in-index"));
+            assertNull(reader.GetNormValues("not-in-index"));
+            assertNull(reader.TermDocsEnum(new Term("not-in-index", "foo")));
+            assertNull(reader.TermPositionsEnum(new Term("not-in-index", "foo")));
+            assertNull(reader.Terms("not-in-index"));
+        }
+
+        [Test]
+        public void TestDuellMemIndex()
+        {
+            LineFileDocs lineFileDocs = new LineFileDocs(Random());
+            int numDocs = AtLeast(10);
+            MemoryIndex memory = new MemoryIndex(Random().nextBoolean(), Random().nextInt(50) * 1024 * 1024);
+            for (int i = 0; i < numDocs; i++)
+            {
+                Store.Directory dir = NewDirectory();
+                MockAnalyzer mockAnalyzer = new MockAnalyzer(Random());
+                mockAnalyzer.MaxTokenLength = (TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH));
+                IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, mockAnalyzer));
+                Document nextDoc = lineFileDocs.NextDoc();
+                Document doc = new Document();
+                foreach (IndexableField field in nextDoc.Fields)
+                {
+                    if (field.FieldType.Indexed)
+                    {
+                        doc.Add(field);
+                        if (Random().nextInt(3) == 0)
+                        {
+                            doc.Add(field);  // randomly add the same field twice
+                        }
+                    }
+                }
+
+                writer.AddDocument(doc);
+                writer.Dispose();
+                foreach (IndexableField field in doc.Fields)
+                {
+                    memory.AddField(field.Name, ((Field)field).StringValue, mockAnalyzer);
+                }
+                DirectoryReader competitor = DirectoryReader.Open(dir);
+                AtomicReader memIndexReader = (AtomicReader)memory.CreateSearcher().IndexReader;
+                DuellReaders(competitor, memIndexReader);
+                IOUtils.Close(competitor, memIndexReader);
+                memory.Reset();
+                dir.Dispose();
+            }
+            lineFileDocs.Dispose();
+        }
+
+        // LUCENE-4880
+        [Test]
+        public void TestEmptyString()
+        {
+            MemoryIndex memory = new MemoryIndex();
+            memory.AddField("foo", new CannedTokenStream(new Analysis.Token("", 0, 5)));
+            IndexSearcher searcher = memory.CreateSearcher();
+            TopDocs docs = searcher.Search(new TermQuery(new Term("foo", "")), 10);
+            assertEquals(1, docs.TotalHits);
+        }
+
+        [Test]
+        public void TestDuelMemoryIndexCoreDirectoryWithArrayField()
+        {
+
+            string field_name = "text";
+            MockAnalyzer mockAnalyzer = new MockAnalyzer(Random());
+            if (Random().nextBoolean())
+            {
+                mockAnalyzer.OffsetGap = (Random().nextInt(100));
+            }
+            //index into a random directory
+            FieldType type = new FieldType(TextField.TYPE_STORED);
+            type.StoreTermVectorOffsets = (true);
+            type.StoreTermVectorPayloads = (false);
+            type.StoreTermVectorPositions = (true);
+            type.StoreTermVectors = (true);
+            type.Freeze();
+
+            Document doc = new Document();
+            doc.Add(new Field(field_name, "la la", type));
+            doc.Add(new Field(field_name, "foo bar foo bar foo", type));
+
+            Store.Directory dir = NewDirectory();
+            IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(Random(), TEST_VERSION_CURRENT, mockAnalyzer));
+            writer.UpdateDocument(new Term("id", "1"), doc);
+            writer.Commit();
+            writer.Dispose();
+            DirectoryReader reader = DirectoryReader.Open(dir);
+
+            //Index document in Memory index
+            MemoryIndex memIndex = new MemoryIndex(true);
+            memIndex.AddField(field_name, "la la", mockAnalyzer);
+            memIndex.AddField(field_name, "foo bar foo bar foo", mockAnalyzer);
+
+            //compare term vectors
+            Terms ramTv = reader.GetTermVector(0, field_name);
+            IndexReader memIndexReader = memIndex.CreateSearcher().IndexReader;
+            Terms memTv = memIndexReader.GetTermVector(0, field_name);
+
+            CompareTermVectors(ramTv, memTv, field_name);
+            memIndexReader.Dispose();
+            reader.Dispose();
+            dir.Dispose();
+
+        }
+
+        protected void CompareTermVectors(Terms terms, Terms memTerms, string field_name)
+        {
+
+            TermsEnum termEnum = terms.Iterator(null);
+            TermsEnum memTermEnum = memTerms.Iterator(null);
+
+            while (termEnum.Next() != null)
+            {
+                assertNotNull(memTermEnum.Next());
+
+                assertEquals(termEnum.TotalTermFreq(), memTermEnum.TotalTermFreq());
+
+                DocsAndPositionsEnum docsPosEnum = termEnum.DocsAndPositions(null, null, 0);
+                DocsAndPositionsEnum memDocsPosEnum = memTermEnum.DocsAndPositions(null, null, 0);
+                String currentTerm = termEnum.Term().Utf8ToString();
+
+
+                assertEquals("Token mismatch for field: " + field_name, currentTerm, memTermEnum.Term().Utf8ToString());
+
+                docsPosEnum.NextDoc();
+                memDocsPosEnum.NextDoc();
+
+                int freq = docsPosEnum.Freq();
+                assertEquals(freq, memDocsPosEnum.Freq());
+                for (int i = 0; i < freq; i++)
+                {
+                    string failDesc = " (field:" + field_name + " term:" + currentTerm + ")";
+                    int memPos = memDocsPosEnum.NextPosition();
+                    int pos = docsPosEnum.NextPosition();
+                    assertEquals("Position test failed" + failDesc, memPos, pos);
+                    assertEquals("Start offset test failed" + failDesc, memDocsPosEnum.StartOffset(), docsPosEnum.StartOffset());
+                    assertEquals("End offset test failed" + failDesc, memDocsPosEnum.EndOffset(), docsPosEnum.EndOffset());
+                    assertEquals("Missing payload test failed" + failDesc, docsPosEnum.Payload, null);
+                }
+            }
+            assertNull("Still some tokens not processed", memTermEnum.Next());
+        }
+    }
+}
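
A note on the query files added below: the test's ReadQueries method loads testqueries.txt and testqueries2.txt through a Java-style getResourceAsStream helper, and the .csproj later in this commit declares both files as EmbeddedResource. A plain .NET sketch of the same lookup, for readers unfamiliar with embedded resources — how the port's helper actually resolves names is an assumption here, and EmbeddedQueryReader/ReadEmbeddedQueries are illustrative names only:

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Text;

    internal static class EmbeddedQueryReader
    {
        // Reads non-empty, non-comment lines ('#' or '//') from an embedded resource,
        // resolving the resource name relative to the given type's namespace.
        public static ISet<string> ReadEmbeddedQueries(Type scope, string resource)
        {
            var queries = new HashSet<string>();
            using (Stream stream = scope.Assembly.GetManifestResourceStream(scope, resource))
            using (var reader = new StreamReader(stream, Encoding.UTF8))
            {
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    line = line.Trim();
                    if (line.Length > 0 && !line.StartsWith("#", StringComparison.Ordinal) && !line.StartsWith("//", StringComparison.Ordinal))
                    {
                        queries.Add(line);
                    }
                }
            }
            return queries;
        }
    }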

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d47b6088/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries.txt
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries.txt b/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries.txt
new file mode 100644
index 0000000..f6d27ef
--- /dev/null
+++ b/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries.txt
@@ -0,0 +1,129 @@
+#
+# queries extracted from TestQueryParser.java
+#
+Apache
+Apach~ AND Copy*
+
+a AND b
+(a AND b)
+c OR (a AND b)
+a AND NOT b
+a AND -b
+a AND !b
+a && b
+a && ! b
+
+a OR b
+a || b
+a OR !b
+a OR ! b
+a OR -b
+
++term -term term
+foo:term AND field:anotherTerm
+term AND "phrase phrase"
+"hello there"
+
+germ term^2.0
+(term)^2.0
+(germ term)^2.0
+term^2.0
+term^2
+"germ term"^2.0
+"term germ"^2
+
+(foo OR bar) AND (baz OR boo)
+((a OR b) AND NOT c) OR d
++(apple "steve jobs") -(foo bar baz)
++title:(dog OR cat) -author:"bob dole"
+
+
+a&b
+a&&b
+.NET
+
+"term germ"~2
+"term germ"~2 flork
+"term"~2
+"~2 germ"
+"term germ"~2^2
+
+3
+term 1.0 1 2
+term term1 term2
+
+term*
+term*^2
+term~
+term~0.7
+term~^2
+term^2~
+term*germ
+term*germ^3
+
+
+term*
+Term*
+TERM*
+term*
+Term*
+TERM*
+
+// Then 'full' wildcard queries:
+te?m
+Te?m
+TE?M
+Te?m*gerM
+te?m
+Te?m
+TE?M
+Te?m*gerM
+
+term term term
+term +stop term
+term -stop term
+drop AND stop AND roll
+term phrase term
+term AND NOT phrase term
+stop
+
+
+[ a TO c]
+[ a TO c ]
+{ a TO c}
+{ a TO c }
+{ a TO c }^2.0
+[ a TO c] OR bar
+[ a TO c] AND bar
+( bar blar { a TO c}) 
+gack ( bar blar { a TO c}) 
+
+
++weltbank +worlbank
++weltbank\n+worlbank
+weltbank \n+worlbank
+weltbank \n +worlbank
++weltbank\r+worlbank
+weltbank \r+worlbank
+weltbank \r +worlbank
++weltbank\r\n+worlbank
+weltbank \r\n+worlbank
+weltbank \r\n +worlbank
+weltbank \r \n +worlbank
++weltbank\t+worlbank
+weltbank \t+worlbank
+weltbank \t +worlbank
+
+
+term term term
+term +term term
+term term +term
+term +term +term
+-term term term
+
+
+on^1.0
+"hello"^2.0
+hello^2.0
+"on"^1.0
+the^3

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d47b6088/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries2.txt
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries2.txt b/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries2.txt
new file mode 100644
index 0000000..6bd341f
--- /dev/null
+++ b/src/Lucene.Net.Tests.Memory/Index/Memory/testqueries2.txt
@@ -0,0 +1,5 @@
+term
+term*
+term~
+Apache
+Apach~ AND Copy*

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d47b6088/src/Lucene.Net.Tests.Memory/Lucene.Net.Tests.Memory.csproj
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Memory/Lucene.Net.Tests.Memory.csproj b/src/Lucene.Net.Tests.Memory/Lucene.Net.Tests.Memory.csproj
new file mode 100644
index 0000000..158b3c1
--- /dev/null
+++ b/src/Lucene.Net.Tests.Memory/Lucene.Net.Tests.Memory.csproj
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="14.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{7F9378BF-C88D-46FF-9AE8-5E7D8C0225D3}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Lucene.Net.Tests.Memory</RootNamespace>
+    <AssemblyName>Lucene.Net.Tests.Memory</AssemblyName>
+    <TargetFrameworkVersion>v4.5.1</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>bin\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>bin\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="nunit.framework, Version=2.6.3.13283, Culture=neutral, PublicKeyToken=96d09a1eb7f44a77, processorArchitecture=MSIL">
+      <HintPath>..\..\packages\NUnit.2.6.3\lib\nunit.framework.dll</HintPath>
+      <Private>True</Private>
+    </Reference>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="Microsoft.CSharp" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Net.Http" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="Index\Memory\MemoryIndexTest.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\Lucene.Net.Core\Lucene.Net.csproj">
+      <Project>{5d4ad9be-1ffb-41ab-9943-25737971bf57}</Project>
+      <Name>Lucene.Net</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.Memory\Lucene.Net.Memory.csproj">
+      <Project>{42ecf239-afc1-427d-921e-b5a277809cf0}</Project>
+      <Name>Lucene.Net.Memory</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.QueryParser\Lucene.Net.QueryParser.csproj">
+      <Project>{949ba34b-6ae6-4ce3-b578-61e13e4d76bf}</Project>
+      <Name>Lucene.Net.QueryParser</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\Lucene.Net.TestFramework\Lucene.Net.TestFramework.csproj">
+      <Project>{B2C0D749-CE34-4F62-A15E-00CB2FF5DDB3}</Project>
+      <Name>Lucene.Net.TestFramework</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <ItemGroup>
+    <None Include="packages.config" />
+  </ItemGroup>
+  <ItemGroup>
+    <Service Include="{82A7F48D-3B50-4B1E-B82E-3ADA8210C358}" />
+  </ItemGroup>
+  <ItemGroup>
+    <EmbeddedResource Include="Index\Memory\testqueries.txt" />
+    <EmbeddedResource Include="Index\Memory\testqueries2.txt" />
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it.
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d47b6088/src/Lucene.Net.Tests.Memory/Properties/AssemblyInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Memory/Properties/AssemblyInfo.cs b/src/Lucene.Net.Tests.Memory/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..796f718
--- /dev/null
+++ b/src/Lucene.Net.Tests.Memory/Properties/AssemblyInfo.cs
@@ -0,0 +1,35 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following 
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("Lucene.Net.Tests.Memory")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("Lucene.Net.Tests.Memory")]
+[assembly: AssemblyCopyright("Copyright ©  2016")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible 
+// to COM components.  If you need to access a type in this assembly from 
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("7f9378bf-c88d-46ff-9ae8-5e7d8c0225d3")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version 
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers 
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("4.8.0")]

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/d47b6088/src/Lucene.Net.Tests.Memory/packages.config
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Memory/packages.config b/src/Lucene.Net.Tests.Memory/packages.config
new file mode 100644
index 0000000..139d513
--- /dev/null
+++ b/src/Lucene.Net.Tests.Memory/packages.config
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<packages>
+  <package id="NUnit" version="2.6.3" targetFramework="net451" />
+</packages>
\ No newline at end of file

