lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [35/72] [abbrv] [partial] lucenenet git commit: Lucene.Net.Tests: Removed \core directory and put its contents in root directory
Date Sun, 26 Feb 2017 23:37:23 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
new file mode 100644
index 0000000..f46110b
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesIndexing.cs
@@ -0,0 +1,982 @@
+using System;
+using System.Threading;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.Threading;
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using Analyzer = Lucene.Net.Analysis.Analyzer;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using Field = Field;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedDocValuesField = SortedDocValuesField;
+    using SortedSetDocValuesField = SortedSetDocValuesField;
+    using StringField = StringField;
+    using TextField = TextField;
+
+    ///
+    /// <summary>
+    /// Tests DocValues integration into IndexWriter
+    ///
+    /// </summary>
+    [SuppressCodecs("Lucene3x")]
+    [TestFixture]
+    public class TestDocValuesIndexing : LuceneTestCase
+    {
+        /*
+         * - add test for multi segment case with deletes
+         * - add multithreaded tests / integrate into stress indexing?
+         */
+
+        /// <summary>
+        /// Builds two single-document indexes, each with a NumericDocValues field "dv",
+        /// merges them into a third index via AddIndexes, force-merges to one segment,
+        /// and verifies the merged segment exposes the doc values for both documents.
+        /// </summary>
+        [Test]
+        public virtual void TestAddIndexes()
+        {
+            Directory d1 = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d1, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("id", "1", Field.Store.YES));
+            doc.Add(new NumericDocValuesField("dv", 1));
+            w.AddDocument(doc);
+            IndexReader r1 = w.Reader;
+            w.Dispose();
+
+            Directory d2 = NewDirectory();
+            w = new RandomIndexWriter(Random(), d2, Similarity, TimeZone);
+            doc = new Document();
+            doc.Add(NewStringField("id", "2", Field.Store.YES));
+            doc.Add(new NumericDocValuesField("dv", 2));
+            w.AddDocument(doc);
+            IndexReader r2 = w.Reader;
+            w.Dispose();
+
+            // Wrap as composite readers so AddIndexes copies both source indexes.
+            Directory d3 = NewDirectory();
+            w = new RandomIndexWriter(Random(), d3, Similarity, TimeZone);
+            w.AddIndexes(SlowCompositeReaderWrapper.Wrap(r1), SlowCompositeReaderWrapper.Wrap(r2));
+            r1.Dispose();
+            d1.Dispose();
+            r2.Dispose();
+            d2.Dispose();
+
+            w.ForceMerge(1);
+            DirectoryReader r3 = w.Reader;
+            w.Dispose();
+            AtomicReader sr = GetOnlySegmentReader(r3);
+            Assert.AreEqual(2, sr.NumDocs);
+            NumericDocValues docValues = sr.GetNumericDocValues("dv");
+            Assert.IsNotNull(docValues);
+            r3.Dispose();
+            d3.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that adding the same NumericDocValuesField instance twice to one
+        /// document is rejected, and that the field can still be indexed normally in
+        /// a subsequent document.
+        /// </summary>
+        [Test]
+        public virtual void TestMultiValuedDocValuesField()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            Field f = new NumericDocValuesField("field", 17);
+            // Index doc values are single-valued so we should not
+            // be able to add same field more than once:
+            doc.Add(f);
+            doc.Add(f);
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            // The same field added once is legal:
+            doc = new Document();
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+            Assert.AreEqual(17, FieldCache.DEFAULT.GetInt32s(GetOnlySegmentReader(r), "field", false).Get(0));
+            r.Dispose();
+            d.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that adding two doc values fields of different types (numeric and
+        /// binary) with the same name to one document is rejected, and that indexing
+        /// the numeric field alone afterwards succeeds.
+        /// </summary>
+        [Test]
+        public virtual void TestDifferentTypedDocValuesField()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            // Index doc values are single-valued so we should not
+            // be able to add same field more than once:
+            Field f;
+            // f is assigned inline so it can be reused for the follow-up document below.
+            doc.Add(f = new NumericDocValuesField("field", 17));
+            doc.Add(new BinaryDocValuesField("field", new BytesRef("blah")));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            doc = new Document();
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            w.Dispose();
+            Assert.AreEqual(17, FieldCache.DEFAULT.GetInt32s(GetOnlySegmentReader(r), "field", false).Get(0));
+            r.Dispose();
+            d.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that adding numeric and sorted doc values fields with the same
+        /// name to one document is rejected, then confirms the numeric field alone
+        /// indexes correctly and is readable via GetNumericDocValues.
+        /// </summary>
+        [Test]
+        public virtual void TestDifferentTypedDocValuesField2()
+        {
+            Directory d = NewDirectory();
+            RandomIndexWriter w = new RandomIndexWriter(Random(), d, Similarity, TimeZone);
+            Document doc = new Document();
+            // Index doc values are single-valued so we should not
+            // be able to add same field more than once:
+            Field f = new NumericDocValuesField("field", 17);
+            doc.Add(f);
+            doc.Add(new SortedDocValuesField("field", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            doc = new Document();
+            doc.Add(f);
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            Assert.AreEqual(17, GetOnlySegmentReader(r).GetNumericDocValues("field").Get(0));
+            r.Dispose();
+            w.Dispose();
+            d.Dispose();
+        }
+
+        // LUCENE-3870
+        /// <summary>
+        /// Regression test for LUCENE-3870: indexes two 32764-byte sorted doc values
+        /// that differ only in their first byte, then verifies both values round-trip
+        /// through the FieldCache. NOTE: <c>b</c> wraps the same <c>bytes</c> array
+        /// (b.Bytes = bytes), so mutating bytes[0] between AddDocument calls — and
+        /// again before each AreEqual — changes the value that b represents.
+        /// </summary>
+        [Test]
+        public virtual void TestLengthPrefixAcrossTwoPages()
+        {
+            Directory d = NewDirectory();
+            IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            var bytes = new byte[32764];
+            BytesRef b = new BytesRef();
+            b.Bytes = bytes;
+            b.Length = bytes.Length;
+            doc.Add(new SortedDocValuesField("field", b));
+            w.AddDocument(doc);
+            bytes[0] = 1;
+            w.AddDocument(doc);
+            w.ForceMerge(1);
+            DirectoryReader r = w.Reader;
+            BinaryDocValues s = FieldCache.DEFAULT.GetTerms(GetOnlySegmentReader(r), "field", false);
+
+            BytesRef bytes1 = new BytesRef();
+            s.Get(0, bytes1);
+            Assert.AreEqual(bytes.Length, bytes1.Length);
+            bytes[0] = 0;
+            Assert.AreEqual(b, bytes1);
+
+            s.Get(1, bytes1);
+            Assert.AreEqual(bytes.Length, bytes1.Length);
+            bytes[0] = 1;
+            Assert.AreEqual(b, bytes1);
+            r.Dispose();
+            w.Dispose();
+            d.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that a doc values field is not a stored field: after indexing 50
+        /// docs with a NumericDocValuesField "dv" and a stored "docId" field, the doc
+        /// values are readable but GetField("dv") on the retrieved document is null.
+        /// </summary>
+        [Test]
+        public virtual void TestDocValuesUnstored()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig iwconfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwconfig.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter writer = new IndexWriter(dir, iwconfig);
+            for (int i = 0; i < 50; i++)
+            {
+                Document doc = new Document();
+                doc.Add(new NumericDocValuesField("dv", i));
+                doc.Add(new TextField("docId", "" + i, Field.Store.YES));
+                writer.AddDocument(doc);
+            }
+            DirectoryReader r = writer.Reader;
+            AtomicReader slow = SlowCompositeReaderWrapper.Wrap(r);
+            FieldInfos fi = slow.FieldInfos;
+            FieldInfo dvInfo = fi.FieldInfo("dv");
+            Assert.IsTrue(dvInfo.HasDocValues);
+            NumericDocValues dv = slow.GetNumericDocValues("dv");
+            for (int i = 0; i < 50; i++)
+            {
+                Assert.AreEqual(i, dv.Get(i));
+                Document d = slow.Document(i);
+                // cannot use d.Get("dv") due to another bug!
+                Assert.IsNull(d.GetField("dv"));
+                Assert.AreEqual(Convert.ToString(i), d.Get("docId"));
+            }
+            slow.Dispose();
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        // Same field in one document as different types:
+        /// <summary>
+        /// Verifies that mixing doc values types (numeric and sorted) on the same
+        /// field within a single document is rejected by IndexWriter.
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTypesSameDocument()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+                // Without this Assert.Fail the test would pass even if no exception
+                // were thrown (other tests in this file, e.g. TestAddSortedTwice, fail explicitly).
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Two documents with same field as different types:
+        /// <summary>
+        /// Verifies that a field indexed with numeric doc values in one document
+        /// cannot be indexed with sorted doc values in a later document of the
+        /// same index.
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTypesDifferentDocuments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+                // Fail explicitly so the test cannot pass when no exception is thrown.
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Adding two SortedDocValuesFields with the same name to a single document
+        /// must be rejected: sorted doc values are single-valued per document.
+        /// </summary>
+        [Test]
+        public virtual void TestAddSortedTwice()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            Directory directory = NewDirectory();
+
+            // Deliberately a plain IndexWriter: RandomIndexWriter could inject extra
+            // doc values fields and change what this test observes.
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            config.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter writer = new IndexWriter(directory, config);
+
+            Document document = new Document();
+            document.Add(new SortedDocValuesField("dv", new BytesRef("foo!")));
+            document.Add(new SortedDocValuesField("dv", new BytesRef("bar!")));
+            try
+            {
+                writer.AddDocument(document);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected: duplicate single-valued doc values field
+            }
+
+            writer.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// Adding two BinaryDocValuesFields with the same name to a single document
+        /// must be rejected: binary doc values are single-valued per document.
+        /// </summary>
+        [Test]
+        public virtual void TestAddBinaryTwice()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            Directory directory = NewDirectory();
+
+            // Deliberately a plain IndexWriter: RandomIndexWriter could inject extra
+            // doc values fields and change what this test observes.
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            config.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter writer = new IndexWriter(directory, config);
+
+            Document document = new Document();
+            document.Add(new BinaryDocValuesField("dv", new BytesRef("foo!")));
+            document.Add(new BinaryDocValuesField("dv", new BytesRef("bar!")));
+            try
+            {
+                writer.AddDocument(document);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected: duplicate single-valued doc values field
+            }
+
+            writer.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// Adding two NumericDocValuesFields with the same name to a single document
+        /// must be rejected: numeric doc values are single-valued per document.
+        /// </summary>
+        [Test]
+        public virtual void TestAddNumericTwice()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+            Directory directory = NewDirectory();
+
+            // Deliberately a plain IndexWriter: RandomIndexWriter could inject extra
+            // doc values fields and change what this test observes.
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            config.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter writer = new IndexWriter(directory, config);
+
+            Document document = new Document();
+            document.Add(new NumericDocValuesField("dv", 1));
+            document.Add(new NumericDocValuesField("dv", 2));
+            try
+            {
+                writer.AddDocument(document);
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected: duplicate single-valued doc values field
+            }
+
+            writer.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that a 100,000-byte value is rejected when indexed as sorted doc
+        /// values (the test expects AddDocument to throw; presumably the value exceeds
+        /// the maximum sorted doc values length — TODO confirm against IndexWriter docs).
+        /// </summary>
+        [Test]
+        public virtual void TestTooLargeSortedBytes()
+        {
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            var bytes = new byte[100000];
+            BytesRef b = new BytesRef(bytes);
+            Random().NextBytes(bytes);
+            doc.Add(new SortedDocValuesField("dv", b));
+            try
+            {
+                iwriter.AddDocument(doc);
+                Assert.Fail("did not get expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            iwriter.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that a 100,000-byte value is rejected when indexed as sorted-set
+        /// doc values. Skipped when the default codec does not support SORTED_SET.
+        /// </summary>
+        [Test]
+        public virtual void TestTooLargeTermSortedSetBytes()
+        {
+            AssumeTrue("codec does not support SORTED_SET", DefaultCodecSupportsSortedSet());
+            Analyzer analyzer = new MockAnalyzer(Random());
+
+            Directory directory = NewDirectory();
+            // we don't use RandomIndexWriter because it might add more docvalues than we expect
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+            iwc.SetMergePolicy(NewLogMergePolicy());
+            IndexWriter iwriter = new IndexWriter(directory, iwc);
+            Document doc = new Document();
+            byte[] bytes = new byte[100000];
+            BytesRef b = new BytesRef(bytes);
+            // The (byte[])(Array) double cast in the original was redundant —
+            // bytes already is byte[] (cf. TestTooLargeSortedBytes above).
+            Random().NextBytes(bytes);
+            doc.Add(new SortedSetDocValuesField("dv", b));
+            try
+            {
+                iwriter.AddDocument(doc);
+                Assert.Fail("did not get expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException expected)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            iwriter.Dispose();
+            directory.Dispose();
+        }
+
+        // Two documents across segments
+        /// <summary>
+        /// Verifies that a doc values type conflict is still detected when the first
+        /// document has already been committed into its own segment.
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTypesDifferentSegments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+            w.Commit();
+
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            try
+            {
+                w.AddDocument(doc);
+                // Fail explicitly so the test cannot pass when no exception is thrown.
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Add inconsistent document after deleteAll
+        /// <summary>
+        /// Verifies that after DeleteAll, a field previously indexed with numeric doc
+        /// values may legally be re-indexed with sorted doc values (no exception expected).
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTypesAfterDeleteAll()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+            w.DeleteAll();
+
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            w.AddDocument(doc);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Add inconsistent document after reopening IW w/ create
+        /// <summary>
+        /// Verifies that reopening the writer with OpenMode.CREATE (wiping the index)
+        /// allows the field's doc values type to change without an exception.
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTypesAfterReopenCreate()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+            w.Dispose();
+
+            IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            iwc.SetOpenMode(OpenMode.CREATE);
+            w = new IndexWriter(dir, iwc);
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            w.AddDocument(doc);
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        // Two documents with same field as different types, added
+        // from separate threads:
+        /// <summary>
+        /// Starts three threads that each add a document with field "foo" using a
+        /// different doc values type (sorted, numeric, binary). At most one type can
+        /// win; at least one thread must observe an ArgumentException, recorded in
+        /// <c>hitExc</c> by the thread helper below.
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTypesDifferentThreads()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+
+            // Gate so all threads attempt AddDocument at roughly the same time.
+            CountdownEvent startingGun = new CountdownEvent(1);
+            AtomicBoolean hitExc = new AtomicBoolean();
+            ThreadClass[] threads = new ThreadClass[3];
+            for (int i = 0; i < 3; i++)
+            {
+                Field field;
+                if (i == 0)
+                {
+                    field = new SortedDocValuesField("foo", new BytesRef("hello"));
+                }
+                else if (i == 1)
+                {
+                    field = new NumericDocValuesField("foo", 0);
+                }
+                else
+                {
+                    field = new BinaryDocValuesField("foo", new BytesRef("bazz"));
+                }
+                Document doc = new Document();
+                doc.Add(field);
+
+                threads[i] = new ThreadAnonymousInnerClassHelper(this, w, startingGun, hitExc, doc);
+                threads[i].Start();
+            }
+
+            startingGun.Signal();
+
+            foreach (ThreadClass t in threads)
+            {
+                t.Join();
+            }
+            Assert.IsTrue(hitExc.Get());
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Worker thread for TestMixedTypesDifferentThreads: waits on the starting
+        /// gun, then adds its document. An ArgumentException (doc values type
+        /// conflict) is the expected outcome for all but one thread and is recorded
+        /// in <c>HitExc</c>; any other exception is rethrown to fail the test.
+        /// </summary>
+        private class ThreadAnonymousInnerClassHelper : ThreadClass
+        {
+            private readonly TestDocValuesIndexing OuterInstance;
+
+            private IndexWriter w;
+            private CountdownEvent StartingGun;
+            private AtomicBoolean HitExc;
+            private Document Doc;
+
+            public ThreadAnonymousInnerClassHelper(TestDocValuesIndexing outerInstance, IndexWriter w, CountdownEvent startingGun, AtomicBoolean hitExc, Document doc)
+            {
+                this.OuterInstance = outerInstance;
+                this.w = w;
+                this.StartingGun = startingGun;
+                this.HitExc = hitExc;
+                this.Doc = doc;
+            }
+
+            public override void Run()
+            {
+                try
+                {
+                    // Block until the test signals all threads to proceed together.
+                    StartingGun.Wait();
+                    w.AddDocument(Doc);
+                }
+#pragma warning disable 168
+                catch (System.ArgumentException iae)
+#pragma warning restore 168
+                {
+                    // expected: doc values type conflict with a sibling thread
+                    HitExc.Set(true);
+                }
+                catch (Exception e)
+                {
+                    // Unexpected failure: surface it (wrapped) so the test fails.
+                    throw new Exception(e.Message, e);
+                }
+            }
+        }
+
+        // Adding documents via addIndexes
+        /// <summary>
+        /// Verifies that a doc values type conflict is detected when merging in a
+        /// second index — both via AddIndexes(Directory) and AddIndexes(IndexReader).
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTypesViaAddIndexes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("foo", 0));
+            w.AddDocument(doc);
+
+            // Make 2nd index w/ inconsistent field
+            Directory dir2 = NewDirectory();
+            IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
+            w2.AddDocument(doc);
+            w2.Dispose();
+
+            try
+            {
+                w.AddIndexes(dir2);
+                // Fail explicitly so the test cannot pass when no exception is thrown.
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            IndexReader r = DirectoryReader.Open(dir2);
+            try
+            {
+                w.AddIndexes(new IndexReader[] { r });
+                Assert.Fail("didn't hit expected exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+
+            r.Dispose();
+            dir2.Dispose();
+            w.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that changing a field's doc values type (numeric to sorted)
+        /// between two documents of the same writer session throws.
+        /// </summary>
+        [Test]
+        public virtual void TestIllegalTypeChange()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that a doc values type change is still rejected when the second
+        /// document is added by a new writer session over the same directory.
+        /// </summary>
+        [Test]
+        public virtual void TestIllegalTypeChangeAcrossSegments()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            // Clone the config: an IndexWriterConfig instance cannot be reused across writers.
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that after closing the writer, reopening, and calling DeleteAll,
+        /// the field's doc values type may change without an exception.
+        /// </summary>
+        [Test]
+        public virtual void TestTypeChangeAfterCloseAndDeleteAll()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            writer.DeleteAll();
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that DeleteAll within a single writer session permits the
+        /// field's doc values type to change without an exception.
+        /// </summary>
+        [Test]
+        public virtual void TestTypeChangeAfterDeleteAll()
+        {
+            Directory directory = NewDirectory();
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter indexWriter = new IndexWriter(directory, config);
+
+            Document first = new Document();
+            first.Add(new NumericDocValuesField("dv", 0L));
+            indexWriter.AddDocument(first);
+
+            // Wiping the index resets the field's doc values type.
+            indexWriter.DeleteAll();
+
+            Document second = new Document();
+            second.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            indexWriter.AddDocument(second);
+
+            indexWriter.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that DeleteAll after a Commit still permits the field's doc
+        /// values type to change without an exception.
+        /// </summary>
+        [Test]
+        public virtual void TestTypeChangeAfterCommitAndDeleteAll()
+        {
+            Directory directory = NewDirectory();
+            IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter indexWriter = new IndexWriter(directory, config);
+
+            Document first = new Document();
+            first.Add(new NumericDocValuesField("dv", 0L));
+            indexWriter.AddDocument(first);
+            indexWriter.Commit();
+
+            // Even with the first doc committed, wiping the index resets the type.
+            indexWriter.DeleteAll();
+
+            Document second = new Document();
+            second.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            indexWriter.AddDocument(second);
+
+            indexWriter.Dispose();
+            directory.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that reopening the index with OpenMode.CREATE (discarding all
+        /// prior segments) permits the field's doc values type to change.
+        /// </summary>
+        [Test]
+        public virtual void TestTypeChangeAfterOpenCreate()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            conf.SetOpenMode(OpenMode.CREATE);
+            writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            writer.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that AddIndexes(Directory) throws when the incoming index holds
+        /// a different doc values type for a field already present in the target.
+        /// </summary>
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexes()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            try
+            {
+                writer.AddIndexes(dir);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+
+            dir.Dispose();
+            dir2.Dispose();
+        }
+
+        /// <summary>
+        /// Same scenario as TestTypeChangeViaAddIndexes, but merging via the
+        /// AddIndexes(IndexReader[]) overload instead of AddIndexes(Directory).
+        /// </summary>
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexesIR()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            writer.AddDocument(doc);
+            IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dir) };
+            try
+            {
+                writer.AddIndexes(readers);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            readers[0].Dispose();
+            writer.Dispose();
+
+            dir.Dispose();
+            dir2.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexes2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            writer.AddIndexes(dir);
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestTypeChangeViaAddIndexesIR2()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone());
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            Directory dir2 = NewDirectory();
+            writer = new IndexWriter(dir2, (IndexWriterConfig)conf.Clone());
+            IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dir) };
+            writer.AddIndexes(readers);
+            readers[0].Dispose();
+            doc = new Document();
+            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("did not hit exception");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException iae)
+#pragma warning restore 168
+            {
+                // expected
+            }
+            writer.Dispose();
+            dir2.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestDocsWithField()
+        {
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+            Document doc = new Document();
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+
+            doc = new Document();
+            doc.Add(new TextField("dv", "some text", Field.Store.NO));
+            doc.Add(new NumericDocValuesField("dv", 0L));
+            writer.AddDocument(doc);
+
+            DirectoryReader r = writer.Reader;
+            writer.Dispose();
+
+            AtomicReader subR = (AtomicReader)r.Leaves[0].Reader;
+            Assert.AreEqual(2, subR.NumDocs);
+
+            IBits bits = FieldCache.DEFAULT.GetDocsWithField(subR, "dv");
+            Assert.IsTrue(bits.Get(0));
+            Assert.IsTrue(bits.Get(1));
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        [Test]
+        public virtual void TestSameFieldNameForPostingAndDocValue()
+        {
+            // LUCENE-5192: FieldInfos.Builder neglected to update
+            // globalFieldNumbers.docValuesType map if the field existed, resulting in
+            // potentially adding the same field with different DV types.
+            Directory dir = NewDirectory();
+            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
+            IndexWriter writer = new IndexWriter(dir, conf);
+
+            Document doc = new Document();
+            doc.Add(new StringField("f", "mock-value", Field.Store.NO));
+            doc.Add(new NumericDocValuesField("f", 5));
+            writer.AddDocument(doc);
+            writer.Commit();
+
+            doc = new Document();
+            doc.Add(new BinaryDocValuesField("f", new BytesRef("mock")));
+            try
+            {
+                writer.AddDocument(doc);
+                Assert.Fail("should not have succeeded to add a field with different DV type than what already exists");
+            }
+#pragma warning disable 168
+            catch (System.ArgumentException e)
+#pragma warning restore 168
+            {
+                writer.Rollback();
+            }
+
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
new file mode 100644
index 0000000..40cc370
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocValuesWithThreads.cs
@@ -0,0 +1,311 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Lucene.Net.Documents;
+using Lucene.Net.Search;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using Lucene.Net.Support;
+    using NUnit.Framework;
+    using System.IO;
+    using System.Threading;
+    using BinaryDocValuesField = BinaryDocValuesField;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using Document = Documents.Document;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using NumericDocValuesField = NumericDocValuesField;
+    using SortedDocValuesField = SortedDocValuesField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+
    /// <summary>
    /// Stress tests that numeric, binary and sorted doc values (and their
    /// FieldCache views) can be read concurrently from a shared
    /// <see cref="AtomicReader"/> by multiple threads.
    /// </summary>
    [SuppressCodecs("Lucene3x")]
    [TestFixture]
    public class TestDocValuesWithThreads : LuceneTestCase
    {
        // Indexes random numeric/binary/sorted doc values, merges to a single
        // segment, then has 2-5 threads verify random docs against the
        // expected values recorded at indexing time.
        [Test]
        public virtual void Test()
        {
            Directory dir = NewDirectory();
            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));

            // Expected per-document values, indexed by docID (valid because
            // the index is force-merged to one segment below, preserving
            // addition order).
            IList<long?> numbers = new List<long?>();
            IList<BytesRef> binary = new List<BytesRef>();
            IList<BytesRef> sorted = new List<BytesRef>();
            int numDocs = AtLeast(100);
            for (int i = 0; i < numDocs; i++)
            {
                Document d = new Document();
                long number = Random().NextLong();
                d.Add(new NumericDocValuesField("number", number));
                BytesRef bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
                d.Add(new BinaryDocValuesField("bytes", bytes));
                binary.Add(bytes);
                bytes = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random()));
                d.Add(new SortedDocValuesField("sorted", bytes));
                sorted.Add(bytes);
                w.AddDocument(d);
                numbers.Add(number);
            }

            // Single segment so docIDs line up with the expected-value lists.
            w.ForceMerge(1);
            IndexReader r = w.Reader;
            w.Dispose();

            Assert.AreEqual(1, r.Leaves.Count);
            AtomicReader ar = (AtomicReader)r.Leaves[0].Reader;

            // 2..5 reader threads, all released at once by the starting gun so
            // they hit the FieldCache concurrently.
            int numThreads = TestUtil.NextInt(Random(), 2, 5);
            IList<ThreadClass> threads = new List<ThreadClass>();
            CountdownEvent startingGun = new CountdownEvent(1);
            for (int t = 0; t < numThreads; t++)
            {
                Random threadRandom = new Random(Random().Next());
                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, numbers, binary, sorted, numDocs, ar, startingGun, threadRandom);
                thread.Start();
                threads.Add(thread);
            }

            // Release all workers simultaneously.
            startingGun.Signal();

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }

            r.Dispose();
            dir.Dispose();
        }

        // Worker thread for Test(): repeatedly picks a random doc and checks
        // the FieldCache views of the "number", "bytes" and "sorted" fields
        // against the expected values captured at indexing time.
        private class ThreadAnonymousInnerClassHelper : ThreadClass
        {
            private readonly TestDocValuesWithThreads OuterInstance;

            private IList<long?> Numbers;
            private IList<BytesRef> Binary;
            private IList<BytesRef> Sorted;
            private int NumDocs;
            private AtomicReader Ar;
            private CountdownEvent StartingGun;
            private Random ThreadRandom;

            public ThreadAnonymousInnerClassHelper(TestDocValuesWithThreads outerInstance, IList<long?> numbers, IList<BytesRef> binary, IList<BytesRef> sorted, int numDocs, AtomicReader ar, CountdownEvent startingGun, Random threadRandom)
            {
                this.OuterInstance = outerInstance;
                this.Numbers = numbers;
                this.Binary = binary;
                this.Sorted = sorted;
                this.NumDocs = numDocs;
                this.Ar = ar;
                this.StartingGun = startingGun;
                this.ThreadRandom = threadRandom;
            }

            public override void Run()
            {
                try
                {
                    //NumericDocValues ndv = ar.GetNumericDocValues("number");
                    FieldCache.Int64s ndv = FieldCache.DEFAULT.GetInt64s(Ar, "number", false);
                    //BinaryDocValues bdv = ar.GetBinaryDocValues("bytes");
                    BinaryDocValues bdv = FieldCache.DEFAULT.GetTerms(Ar, "bytes", false);
                    SortedDocValues sdv = FieldCache.DEFAULT.GetTermsIndex(Ar, "sorted");
                    // Block until all worker threads are started.
                    StartingGun.Wait();
                    int iters = AtLeast(1000);
                    BytesRef scratch = new BytesRef();
                    BytesRef scratch2 = new BytesRef();
                    for (int iter = 0; iter < iters; iter++)
                    {
                        int docID = ThreadRandom.Next(NumDocs);
                        // Randomly pick one of the numeric FieldCache views; the
                        // expected value is truncated to the view's width.
                        switch (ThreadRandom.Next(6))
                        {
#pragma warning disable 612, 618
                            case 0:
                                Assert.AreEqual((long)(sbyte)Numbers[docID], FieldCache.DEFAULT.GetBytes(Ar, "number", false).Get(docID));
                                break;

                            case 1:
                                Assert.AreEqual((long)(short)Numbers[docID], FieldCache.DEFAULT.GetInt16s(Ar, "number", false).Get(docID));
                                break;
#pragma warning restore 612, 618

                            case 2:
                                Assert.AreEqual((long)(int)Numbers[docID], FieldCache.DEFAULT.GetInt32s(Ar, "number", false).Get(docID));
                                break;

                            case 3:
                                Assert.AreEqual((long)Numbers[docID], FieldCache.DEFAULT.GetInt64s(Ar, "number", false).Get(docID));
                                break;

                            case 4:
                                Assert.AreEqual(Number.Int32BitsToSingle((int)Numbers[docID]), FieldCache.DEFAULT.GetSingles(Ar, "number", false).Get(docID), 0.0f);
                                break;

                            case 5:
                                Assert.AreEqual(BitConverter.Int64BitsToDouble((long)Numbers[docID]), FieldCache.DEFAULT.GetDoubles(Ar, "number", false).Get(docID), 0.0);
                                break;
                        }
                        bdv.Get(docID, scratch);
                        Assert.AreEqual(Binary[docID], scratch);
                        // Cannot share a single scratch against two "sources":
                        sdv.Get(docID, scratch2);
                        Assert.AreEqual(Sorted[docID], scratch2);
                    }
                }
                catch (Exception e)
                {
                    // Propagate any assertion/IO failure so the test fails.
                    throw new Exception(e.Message, e);
                }
            }
        }

        // Indexes random strings as sorted doc values (optionally with
        // duplicates), then has 1-10 threads read them back concurrently
        // until a deadline.
        [Test]
        public virtual void Test2()
        {
            Random random = Random();
            int NUM_DOCS = AtLeast(100);
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(random, dir, Similarity, TimeZone);
            bool allowDups = random.NextBoolean();
            HashSet<string> seen = new HashSet<string>();
            if (VERBOSE)
            {
                Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS + " allowDups=" + allowDups);
            }
            int numDocs = 0;
            // Expected value per docID ("id" field maps docID back to this list).
            IList<BytesRef> docValues = new List<BytesRef>();

            // TODO: deletions
            while (numDocs < NUM_DOCS)
            {
                string s;
                if (random.NextBoolean())
                {
                    s = TestUtil.RandomSimpleString(random);
                }
                else
                {
                    s = TestUtil.RandomUnicodeString(random);
                }
                BytesRef br = new BytesRef(s);

                // When duplicates are disallowed, skip strings already indexed.
                if (!allowDups)
                {
                    if (seen.Contains(s))
                    {
                        continue;
                    }
                    seen.Add(s);
                }

                if (VERBOSE)
                {
                    Console.WriteLine("  " + numDocs + ": s=" + s);
                }

                Document doc = new Document();
                doc.Add(new SortedDocValuesField("stringdv", br));
                doc.Add(new NumericDocValuesField("id", numDocs));
                docValues.Add(br);
                writer.AddDocument(doc);
                numDocs++;

                if (random.Next(40) == 17)
                {
                    // force flush
                    writer.Reader.Dispose();
                }
            }

            writer.ForceMerge(1);
            DirectoryReader r = writer.Reader;
            writer.Dispose();

            AtomicReader sr = GetOnlySegmentReader(r);

            // NOTE(review): Environment.TickCount is in milliseconds, so this
            // deadline is only ~30ms (nightly) / ~1ms after start; the
            // run-for-seconds intent suggests a missing "* 1000" — confirm
            // whether the shortened run time is deliberate in this port.
            long END_TIME = Environment.TickCount + (TEST_NIGHTLY ? 30 : 1);

            int NUM_THREADS = TestUtil.NextInt(Random(), 1, 10);
            ThreadClass[] threads = new ThreadClass[NUM_THREADS];
            for (int thread = 0; thread < NUM_THREADS; thread++)
            {
                threads[thread] = new ThreadAnonymousInnerClassHelper2(random, docValues, sr, END_TIME);
                threads[thread].Start();
            }

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }

            r.Dispose();
            dir.Dispose();
        }

        // Worker thread for Test2(): reads random docs' sorted doc values and
        // checks them against the expected list (via the "id" mapping) until
        // the deadline passes.
        private class ThreadAnonymousInnerClassHelper2 : ThreadClass
        {
            private Random Random;
            private IList<BytesRef> DocValues;
            private AtomicReader Sr;
            private long END_TIME;

            public ThreadAnonymousInnerClassHelper2(Random random, IList<BytesRef> docValues, AtomicReader sr, long END_TIME)
            {
                this.Random = random;
                this.DocValues = docValues;
                this.Sr = sr;
                this.END_TIME = END_TIME;
            }

            public override void Run()
            {
                Random random = Random();
                SortedDocValues stringDVDirect;
                NumericDocValues docIDToID;
                try
                {
                    stringDVDirect = Sr.GetSortedDocValues("stringdv");
                    docIDToID = Sr.GetNumericDocValues("id");
                    Assert.IsNotNull(stringDVDirect);
                }
                catch (IOException ioe)
                {
                    // Fail the thread (and hence the test) on reader errors.
                    throw new Exception(ioe.Message, ioe);
                }
                while (Environment.TickCount < END_TIME)
                {
                    SortedDocValues source;
                    source = stringDVDirect;
                    BytesRef scratch = new BytesRef();

                    for (int iter = 0; iter < 100; iter++)
                    {
                        int docID = random.Next(Sr.MaxDoc);
                        source.Get(docID, scratch);
                        // "id" doc values map docID back to the insertion index.
                        Assert.AreEqual(DocValues[(int)docIDToID.Get(docID)], scratch);
                    }
                }
            }
        }
    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
new file mode 100644
index 0000000..03ba737
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocsAndPositions.cs
@@ -0,0 +1,430 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    using Lucene.Net.Randomized.Generators;
+    using NUnit.Framework;
+    using IBits = Lucene.Net.Util.IBits;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+
+    /*
+         * Licensed to the Apache Software Foundation (ASF) under one or more
+         * contributor license agreements.  See the NOTICE file distributed with
+         * this work for additional information regarding copyright ownership.
+         * The ASF licenses this file to You under the Apache License, Version 2.0
+         * (the "License"); you may not use this file except in compliance with
+         * the License.  You may obtain a copy of the License at
+         *
+         *     http://www.apache.org/licenses/LICENSE-2.0
+         *
+         * Unless required by applicable law or agreed to in writing, software
+         * distributed under the License is distributed on an "AS IS" BASIS,
+         * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+         * See the License for the specific language governing permissions and
+         * limitations under the License.
+         */
+
+    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    [TestFixture]
+    public class TestDocsAndPositions : LuceneTestCase
+    {
+        private string FieldName;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            FieldName = "field" + Random().Next();
+        }
+
        /// <summary>
        /// Simple testcase for <seealso cref="DocsAndPositionsEnum"/>: every
        /// document is four repetitions of "1 2 ... 10", so the term "1" has
        /// freq 4 in each doc at positions 0, 10, 20 and 30.
        /// </summary>
        [Test]
        public virtual void TestPositionsSimple()
        {
            Directory directory = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            for (int i = 0; i < 39; i++)
            {
                Document doc = new Document();
                FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
                customType.OmitNorms = true;
                doc.Add(NewField(FieldName, "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10 " + "1 2 3 4 5 6 7 8 9 10", customType));
                writer.AddDocument(doc);
            }
            IndexReader reader = writer.Reader;
            writer.Dispose();

            int num = AtLeast(13);
            for (int i = 0; i < num; i++)
            {
                BytesRef bytes = new BytesRef("1");
                IndexReaderContext topReaderContext = reader.Context;
                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
                {
                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
                    Assert.IsNotNull(docsAndPosEnum);
                    // Skip empty leaves; Advance below needs MaxDoc > 0.
                    if (atomicReaderContext.Reader.MaxDoc == 0)
                    {
                        continue;
                    }
                    // Jump to a random doc, then walk the rest of the postings,
                    // checking freq and all four positions on every doc.
                    int advance = docsAndPosEnum.Advance(Random().Next(atomicReaderContext.Reader.MaxDoc));
                    do
                    {
                        string msg = "Advanced to: " + advance + " current doc: " + docsAndPosEnum.DocID; // TODO: + " usePayloads: " + usePayload;
                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
                        Assert.AreEqual(0, docsAndPosEnum.NextPosition(), msg);
                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
                        Assert.AreEqual(10, docsAndPosEnum.NextPosition(), msg);
                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
                        Assert.AreEqual(20, docsAndPosEnum.NextPosition(), msg);
                        Assert.AreEqual(4, docsAndPosEnum.Freq, msg);
                        Assert.AreEqual(30, docsAndPosEnum.NextPosition(), msg);
                    } while (docsAndPosEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
                }
            }
            reader.Dispose();
            directory.Dispose();
        }
+
+        public virtual DocsAndPositionsEnum GetDocsAndPositions(AtomicReader reader, BytesRef bytes, IBits liveDocs)
+        {
+            Terms terms = reader.Terms(FieldName);
+            if (terms != null)
+            {
+                TermsEnum te = terms.GetIterator(null);
+                if (te.SeekExact(bytes))
+                {
+                    return te.DocsAndPositions(liveDocs, null);
+                }
+            }
+            return null;
+        }
+
+        /// <summary>
+        /// this test indexes random numbers within a range into a field and checks
+        /// their occurrences by searching for a number from that range selected at
+        /// random. All positions for that number are saved up front and compared to
+        /// the enums positions.
+        /// </summary>
+        [Test]
+        public virtual void TestRandomPositions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
+            int numDocs = AtLeast(47);
+            int max = 1051;
+            int term = Random().Next(max);
+            int?[][] positionsInDoc = new int?[numDocs][];
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            for (int i = 0; i < numDocs; i++)
+            {
+                Document doc = new Document();
+                List<int?> positions = new List<int?>();
+                StringBuilder builder = new StringBuilder();
+                int num = AtLeast(131);
+                for (int j = 0; j < num; j++)
+                {
+                    int nextInt = Random().Next(max);
+                    builder.Append(nextInt).Append(" ");
+                    if (nextInt == term)
+                    {
+                        positions.Add(Convert.ToInt32(j));
+                    }
+                }
+                if (positions.Count == 0)
+                {
+                    builder.Append(term);
+                    positions.Add(num);
+                }
+                doc.Add(NewField(FieldName, builder.ToString(), customType));
+                positionsInDoc[i] = positions.ToArray();
+                writer.AddDocument(doc);
+            }
+
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            int num_ = AtLeast(13);
+            for (int i = 0; i < num_; i++)
+            {
+                BytesRef bytes = new BytesRef("" + term);
+                IndexReaderContext topReaderContext = reader.Context;
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
+                {
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
+                    Assert.IsNotNull(docsAndPosEnum);
+                    int initDoc = 0;
+                    int maxDoc = atomicReaderContext.Reader.MaxDoc;
+                    // initially advance or do next doc
+                    if (Random().NextBoolean())
+                    {
+                        initDoc = docsAndPosEnum.NextDoc();
+                    }
+                    else
+                    {
+                        initDoc = docsAndPosEnum.Advance(Random().Next(maxDoc));
+                    }
+                    // now run through the scorer and check if all positions are there...
+                    do
+                    {
+                        int docID = docsAndPosEnum.DocID;
+                        if (docID == DocIdSetIterator.NO_MORE_DOCS)
+                        {
+                            break;
+                        }
+                        int?[] pos = positionsInDoc[atomicReaderContext.DocBase + docID];
+                        Assert.AreEqual(pos.Length, docsAndPosEnum.Freq);
+                        // number of positions read should be random - don't read all of them
+                        // allways
+                        int howMany = Random().Next(20) == 0 ? pos.Length - Random().Next(pos.Length) : pos.Length;
+                        for (int j = 0; j < howMany; j++)
+                        {
+                            Assert.AreEqual(pos[j], docsAndPosEnum.NextPosition(), "iteration: " + i + " initDoc: " + initDoc + " doc: " + docID + " base: " + atomicReaderContext.DocBase + " positions: " + pos); /* TODO: + " usePayloads: "
+	                + usePayload*/
+                        }
+
+                        if (Random().Next(10) == 0) // once is a while advance
+                        {
+                            if (docsAndPosEnum.Advance(docID + 1 + Random().Next((maxDoc - docID))) == DocIdSetIterator.NO_MORE_DOCS)
+                            {
+                                break;
+                            }
+                        }
+                    } while (docsAndPosEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+                }
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
        /// <summary>
        /// Indexes random numbers and records how often a randomly selected
        /// term occurs per document, then verifies DocsEnum iteration
        /// (NextDoc and occasional Advance) reports exactly those docs and
        /// frequencies.
        /// </summary>
        [Test]
        public virtual void TestRandomDocs()
        {
            Directory dir = NewDirectory();
            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
            int numDocs = AtLeast(49);
            int max = 15678;
            int term = Random().Next(max);
            // freqInDoc[i] counts how many times `term` was written into doc i
            // (may be zero - such docs must not appear in the postings).
            int[] freqInDoc = new int[numDocs];
            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
            customType.OmitNorms = true;
            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();
                StringBuilder builder = new StringBuilder();
                for (int j = 0; j < 199; j++)
                {
                    int nextInt = Random().Next(max);
                    builder.Append(nextInt).Append(' ');
                    if (nextInt == term)
                    {
                        freqInDoc[i]++;
                    }
                }
                doc.Add(NewField(FieldName, builder.ToString(), customType));
                writer.AddDocument(doc);
            }

            IndexReader reader = writer.Reader;
            writer.Dispose();

            int num = AtLeast(13);
            for (int i = 0; i < num; i++)
            {
                BytesRef bytes = new BytesRef("" + term);
                IndexReaderContext topReaderContext = reader.Context;
                foreach (AtomicReaderContext context in topReaderContext.Leaves)
                {
                    int maxDoc = context.AtomicReader.MaxDoc;
                    DocsEnum docsEnum = TestUtil.Docs(Random(), context.Reader, FieldName, bytes, null, null, DocsEnum.FLAG_FREQS);
                    // When the term occurs nowhere in this leaf, no enum exists.
                    if (FindNext(freqInDoc, context.DocBase, context.DocBase + maxDoc) == int.MaxValue)
                    {
                        Assert.IsNull(docsEnum);
                        continue;
                    }
                    Assert.IsNotNull(docsEnum);
                    docsEnum.NextDoc();
                    for (int j = 0; j < maxDoc; j++)
                    {
                        if (freqInDoc[context.DocBase + j] != 0)
                        {
                            Assert.AreEqual(j, docsEnum.DocID);
                            Assert.AreEqual(docsEnum.Freq, freqInDoc[context.DocBase + j]);
                            // Occasionally advance instead of stepping; Advance may
                            // legally land before `next` (on `next` itself at most).
                            if (i % 2 == 0 && Random().Next(10) == 0)
                            {
                                int next = FindNext(freqInDoc, context.DocBase + j + 1, context.DocBase + maxDoc) - context.DocBase;
                                int advancedTo = docsEnum.Advance(next);
                                if (next >= maxDoc)
                                {
                                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, advancedTo);
                                }
                                else
                                {
                                    Assert.IsTrue(next >= advancedTo, "advanced to: " + advancedTo + " but should be <= " + next);
                                }
                            }
                            else
                            {
                                docsEnum.NextDoc();
                            }
                        }
                    }
                    // After all matching docs are consumed the enum is exhausted.
                    Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, docsEnum.DocID, "DocBase: " + context.DocBase + " maxDoc: " + maxDoc + " " + docsEnum.GetType());
                }
            }

            reader.Dispose();
            dir.Dispose();
        }
+
+        /// <summary>
+        /// Returns the first index i in [pos, max) for which docs[i] != 0,
+        /// or int.MaxValue if every entry in that range is zero.
+        /// Callers treat int.MaxValue as "no document with a non-zero freq
+        /// in this segment's doc range".
+        /// </summary>
+        private static int FindNext(int[] docs, int pos, int max)
+        {
+            for (int i = pos; i < max; i++)
+            {
+                if (docs[i] != 0)
+                {
+                    return i;
+                }
+            }
+            return int.MaxValue;
+        }
+
+        /// <summary>
+        /// tests retrieval of positions for terms that have a large number of
+        /// occurrences to force test of buffer refill during positions iteration.
+        /// </summary>
+        [Test]
+        public virtual void TestLargeNumberOfPositions()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            int howMany = 1000;
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            // Each of the 39 docs contains howMany alternating tokens "even odd even odd ...",
+            // so "even" occupies every even position and occurs howMany/2 times per doc.
+            for (int i = 0; i < 39; i++)
+            {
+                Document doc = new Document();
+                StringBuilder builder = new StringBuilder();
+                for (int j = 0; j < howMany; j++)
+                {
+                    if (j % 2 == 0)
+                    {
+                        builder.Append("even ");
+                    }
+                    else
+                    {
+                        builder.Append("odd ");
+                    }
+                }
+                doc.Add(NewField(FieldName, builder.ToString(), customType));
+                writer.AddDocument(doc);
+            }
+
+            // now do searches
+            IndexReader reader = writer.Reader;
+            writer.Dispose();
+
+            int num = AtLeast(13);
+            for (int i = 0; i < num; i++)
+            {
+                BytesRef bytes = new BytesRef("even");
+
+                IndexReaderContext topReaderContext = reader.Context;
+                foreach (AtomicReaderContext atomicReaderContext in topReaderContext.Leaves)
+                {
+                    DocsAndPositionsEnum docsAndPosEnum = GetDocsAndPositions((AtomicReader)atomicReaderContext.Reader, bytes, null);
+                    Assert.IsNotNull(docsAndPosEnum);
+
+                    int initDoc = 0;
+                    int maxDoc = atomicReaderContext.Reader.MaxDoc;
+                    // initially advance or do next doc
+                    if (Random().NextBoolean())
+                    {
+                        initDoc = docsAndPosEnum.NextDoc();
+                    }
+                    else
+                    {
+                        initDoc = docsAndPosEnum.Advance(Random().Next(maxDoc));
+                    }
+                    string msg = "Iteration: " + i + " initDoc: " + initDoc; // TODO: + " payloads: " + usePayload;
+                    Assert.AreEqual(howMany / 2, docsAndPosEnum.Freq);
+                    // Every occurrence of "even" sits at an even position: 0, 2, 4, ...
+                    for (int j = 0; j < howMany; j += 2)
+                    {
+                        Assert.AreEqual(j, docsAndPosEnum.NextPosition(), "position missmatch index: " + j + " with freq: " + docsAndPosEnum.Freq + " -- " + msg);
+                    }
+                }
+            }
+            reader.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that a freshly obtained DocsEnum is positioned before the
+        /// first document (DocID == -1), and that reusing the enum through
+        /// TestUtil.Docs resets it to the same unpositioned state.
+        /// </summary>
+        [Test]
+        public virtual void TestDocsEnumStart()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewStringField("foo", "bar", Field.Store.NO));
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            AtomicReader r = GetOnlySegmentReader(reader);
+            DocsEnum disi = TestUtil.Docs(Random(), r, "foo", new BytesRef("bar"), null, null, DocsEnum.FLAG_NONE);
+            // must start unpositioned, before the first doc
+            int docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            // now reuse and check again
+            TermsEnum te = r.Terms("foo").GetIterator(null);
+            Assert.IsTrue(te.SeekExact(new BytesRef("bar")));
+            disi = TestUtil.Docs(Random(), te, null, disi, DocsEnum.FLAG_NONE);
+            docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            writer.Dispose();
+            r.Dispose();
+            dir.Dispose();
+        }
+
+        /// <summary>
+        /// Same contract as TestDocsEnumStart, but for DocsAndPositionsEnum:
+        /// a freshly obtained (or reused) enum must report DocID == -1 before
+        /// the first call to NextDoc().
+        /// </summary>
+        [Test]
+        public virtual void TestDocsAndPositionsEnumStart()
+        {
+            Directory dir = NewDirectory();
+            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);
+            Document doc = new Document();
+            doc.Add(NewTextField("foo", "bar", Field.Store.NO));
+            writer.AddDocument(doc);
+            DirectoryReader reader = writer.Reader;
+            AtomicReader r = GetOnlySegmentReader(reader);
+            DocsAndPositionsEnum disi = r.TermPositionsEnum(new Term("foo", "bar"));
+            // must start unpositioned, before the first doc
+            int docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+
+            // now reuse and check again
+            TermsEnum te = r.Terms("foo").GetIterator(null);
+            Assert.IsTrue(te.SeekExact(new BytesRef("bar")));
+            disi = te.DocsAndPositions(null, disi);
+            docid = disi.DocID;
+            Assert.AreEqual(-1, docid);
+            Assert.IsTrue(disi.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            writer.Dispose();
+            r.Dispose();
+            dir.Dispose();
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/96822396/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
new file mode 100644
index 0000000..fe61f04
--- /dev/null
+++ b/src/Lucene.Net.Tests/Index/TestDocumentWriter.cs
@@ -0,0 +1,409 @@
+using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Documents;
+
+namespace Lucene.Net.Index
+{
+    /*
+     * Licensed to the Apache Software Foundation (ASF) under one or more
+     * contributor license agreements.  See the NOTICE file distributed with
+     * this work for additional information regarding copyright ownership.
+     * The ASF licenses this file to You under the Apache License, Version 2.0
+     * (the "License"); you may not use this file except in compliance with
+     * the License.  You may obtain a copy of the License at
+     *
+     *     http://www.apache.org/licenses/LICENSE-2.0
+     *
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     */
+
+    using Lucene.Net.Analysis;
+    using NUnit.Framework;
+    using System.IO;
+    using AttributeSource = Lucene.Net.Util.AttributeSource;
+    using BytesRef = Lucene.Net.Util.BytesRef;
+    using Directory = Lucene.Net.Store.Directory;
+    using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
+    using Document = Documents.Document;
+    using Field = Field;
+    using FieldType = FieldType;
+    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
+    using StringField = StringField;
+    using TestUtil = Lucene.Net.Util.TestUtil;
+    using TextField = TextField;
+
+    /// <summary>
+    /// Tests low-level document indexing behavior: stored fields and term-vector
+    /// flags, position increment gaps across multi-valued fields, token
+    /// reuse/synonym filters, pre-analyzed token streams, and per-field
+    /// norms / index-options handling.
+    /// </summary>
+    [TestFixture]
+    public class TestDocumentWriter : LuceneTestCase
+    {
+        // Fresh directory per test: created in SetUp, disposed in TearDown.
+        private Directory Dir;
+
+        [SetUp]
+        public override void SetUp()
+        {
+            base.SetUp();
+            Dir = NewDirectory();
+        }
+
+        [TearDown]
+        public override void TearDown()
+        {
+            Dir.Dispose();
+            base.TearDown();
+        }
+
+        /// <summary>
+        /// Sanity check that SetUp created the working directory.
+        /// </summary>
+        [Test]
+        public virtual void Test()
+        {
+            Assert.IsTrue(Dir != null);
+        }
+
+        /// <summary>
+        /// Adds a DocHelper document, reopens its segment directly, and verifies
+        /// the stored field values, term-vector flags, and that norms are present
+        /// exactly for indexed fields that do not omit them.
+        /// </summary>
+        [Test]
+        public virtual void TestAddDocument()
+        {
+            Document testDoc = new Document();
+            DocHelper.SetupDoc(testDoc);
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(testDoc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            //After adding the document, we should be able to read it back in
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+            Assert.IsTrue(reader != null);
+            Document doc = reader.Document(0);
+            Assert.IsTrue(doc != null);
+
+            //System.out.println("Document: " + doc);
+            IIndexableField[] fields = doc.GetFields("textField2");
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_2_TEXT));
+            Assert.IsTrue(fields[0].FieldType.StoreTermVectors);
+
+            fields = doc.GetFields("textField1");
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_1_TEXT));
+            Assert.IsFalse(fields[0].FieldType.StoreTermVectors);
+
+            fields = doc.GetFields("keyField");
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.KEYWORD_TEXT));
+
+            fields = doc.GetFields(DocHelper.NO_NORMS_KEY);
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.NO_NORMS_TEXT));
+
+            fields = doc.GetFields(DocHelper.TEXT_FIELD_3_KEY);
+            Assert.IsTrue(fields != null && fields.Length == 1);
+            Assert.IsTrue(fields[0].GetStringValue().Equals(DocHelper.FIELD_3_TEXT));
+
+            // test that the norms are not present in the segment if
+            // omitNorms is true
+            foreach (FieldInfo fi in reader.FieldInfos)
+            {
+                if (fi.IsIndexed)
+                {
+                    Assert.IsTrue(fi.OmitsNorms == (reader.GetNormValues(fi.Name) == null));
+                }
+            }
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Verifies that the analyzer's position increment gap (500 here) is
+        /// applied between the two values of the multi-valued "repeated" field.
+        /// </summary>
+        [Test]
+        public virtual void TestPositionIncrementGap()
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper(this);
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+
+            Document doc = new Document();
+            doc.Add(NewTextField("repeated", "repeated one", Field.Store.YES));
+            doc.Add(NewTextField("repeated", "repeated two", Field.Store.YES));
+
+            writer.AddDocument(doc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+
+            DocsAndPositionsEnum termPositions = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), "repeated", new BytesRef("repeated"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            int freq = termPositions.Freq;
+            Assert.AreEqual(2, freq);
+            Assert.AreEqual(0, termPositions.NextPosition());
+            // 502 = 2 tokens in the first field value + the 500-position gap between values
+            Assert.AreEqual(502, termPositions.NextPosition());
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Whitespace-tokenizing analyzer that inserts a 500-position gap between
+        /// multiple values of the same field (used by TestPositionIncrementGap).
+        /// </summary>
+        private class AnalyzerAnonymousInnerClassHelper : Analyzer
+        {
+            private readonly TestDocumentWriter OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper(TestDocumentWriter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, false));
+            }
+
+            // fixed gap inserted between multiple values of the same field
+            public override int GetPositionIncrementGap(string fieldName)
+            {
+                return 500;
+            }
+        }
+
+        /// <summary>
+        /// Indexes "a 5 a a" through a filter that emits a zero-increment "b"
+        /// synonym after every token, treats a leading digit as that token's
+        /// position increment, and attaches a payload only at the first position.
+        /// Checks the resulting positions of "a" (0, 6, 7) and that only the
+        /// first position carries a payload.
+        /// </summary>
+        [Test]
+        public virtual void TestTokenReuse()
+        {
+            Analyzer analyzer = new AnalyzerAnonymousInnerClassHelper2(this);
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+
+            Document doc = new Document();
+            doc.Add(NewTextField("f1", "a 5 a a", Field.Store.YES));
+
+            writer.AddDocument(doc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+
+            DocsAndPositionsEnum termPositions = MultiFields.GetTermPositionsEnum(reader, reader.LiveDocs, "f1", new BytesRef("a"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            int freq = termPositions.Freq;
+            Assert.AreEqual(3, freq);
+            Assert.AreEqual(0, termPositions.NextPosition());
+            Assert.IsNotNull(termPositions.GetPayload());
+            Assert.AreEqual(6, termPositions.NextPosition());
+            Assert.IsNull(termPositions.GetPayload());
+            Assert.AreEqual(7, termPositions.NextPosition());
+            Assert.IsNull(termPositions.GetPayload());
+            reader.Dispose();
+        }
+
+        private class AnalyzerAnonymousInnerClassHelper2 : Analyzer
+        {
+            private readonly TestDocumentWriter OuterInstance;
+
+            public AnalyzerAnonymousInnerClassHelper2(TestDocumentWriter outerInstance)
+            {
+                this.OuterInstance = outerInstance;
+            }
+
+            protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader)
+            {
+                Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
+                return new TokenStreamComponents(tokenizer, new TokenFilterAnonymousInnerClassHelper(this, tokenizer));
+            }
+
+            private class TokenFilterAnonymousInnerClassHelper : TokenFilter
+            {
+                private readonly AnalyzerAnonymousInnerClassHelper2 OuterInstance;
+
+                public TokenFilterAnonymousInnerClassHelper(AnalyzerAnonymousInnerClassHelper2 outerInstance, Tokenizer tokenizer)
+                    : base(tokenizer)
+                {
+                    this.OuterInstance = outerInstance;
+                    first = true;
+                    termAtt = AddAttribute<ICharTermAttribute>();
+                    payloadAtt = AddAttribute<IPayloadAttribute>();
+                    posIncrAtt = AddAttribute<IPositionIncrementAttribute>();
+                }
+
+                // true until the first real token has been emitted (payload goes on that one only)
+                internal bool first;
+                // captured state of the last real token; non-null means a synonym is pending
+                internal AttributeSource.State state;
+
+                public sealed override bool IncrementToken()
+                {
+                    if (state != null)
+                    {
+                        // emit the synonym "b" at the same position (increment 0) as the captured token
+                        RestoreState(state);
+                        payloadAtt.Payload = null;
+                        posIncrAtt.PositionIncrement = 0;
+                        termAtt.SetEmpty().Append("b");
+                        state = null;
+                        return true;
+                    }
+
+                    bool hasNext = m_input.IncrementToken();
+                    if (!hasNext)
+                    {
+                        return false;
+                    }
+                    // a token starting with a digit sets its own position increment to that digit
+                    if (char.IsDigit(termAtt.Buffer[0]))
+                    {
+                        posIncrAtt.PositionIncrement = termAtt.Buffer[0] - '0';
+                    }
+                    if (first)
+                    {
+                        // set payload on first position only
+                        payloadAtt.Payload = new BytesRef(new byte[] { 100 });
+                        first = false;
+                    }
+
+                    // index a "synonym" for every token
+                    state = CaptureState();
+                    return true;
+                }
+
+                public sealed override void Reset()
+                {
+                    base.Reset();
+                    first = true;
+                    state = null;
+                }
+
+                internal readonly ICharTermAttribute termAtt;
+                internal readonly IPayloadAttribute payloadAtt;
+                internal readonly IPositionIncrementAttribute posIncrAtt;
+            }
+        }
+
+        /// <summary>
+        /// Indexes a field from an already-analyzed TokenStream emitting
+        /// "term1 term2 term3 term2" and verifies each term's frequency and
+        /// positions (term2 occurs at positions 1 and 3).
+        /// </summary>
+        [Test]
+        public virtual void TestPreAnalyzedField()
+        {
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            Document doc = new Document();
+
+            doc.Add(new TextField("preanalyzed", new TokenStreamAnonymousInnerClassHelper(this)));
+
+            writer.AddDocument(doc);
+            writer.Commit();
+            SegmentCommitInfo info = writer.NewestSegment();
+            writer.Dispose();
+            SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, NewIOContext(Random()));
+
+            DocsAndPositionsEnum termPositions = reader.TermPositionsEnum(new Term("preanalyzed", "term1"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, termPositions.Freq);
+            Assert.AreEqual(0, termPositions.NextPosition());
+
+            termPositions = reader.TermPositionsEnum(new Term("preanalyzed", "term2"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(2, termPositions.Freq);
+            Assert.AreEqual(1, termPositions.NextPosition());
+            Assert.AreEqual(3, termPositions.NextPosition());
+
+            termPositions = reader.TermPositionsEnum(new Term("preanalyzed", "term3"));
+            Assert.IsTrue(termPositions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+            Assert.AreEqual(1, termPositions.Freq);
+            Assert.AreEqual(2, termPositions.NextPosition());
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Pre-analyzed token stream emitting the fixed sequence
+        /// "term1", "term2", "term3", "term2" (used by TestPreAnalyzedField).
+        /// </summary>
+        private class TokenStreamAnonymousInnerClassHelper : TokenStream
+        {
+            private readonly TestDocumentWriter OuterInstance;
+
+            public TokenStreamAnonymousInnerClassHelper(TestDocumentWriter outerInstance) 
+            {
+                this.OuterInstance = outerInstance;
+                tokens = new string[] { "term1", "term2", "term3", "term2" };
+                index = 0;
+                termAtt = AddAttribute<ICharTermAttribute>();
+            }
+
+            private string[] tokens;
+            private int index;
+
+            private ICharTermAttribute termAtt;
+
+            public sealed override bool IncrementToken()
+            {
+                if (index == tokens.Length)
+                {
+                    return false;
+                }
+                else
+                {
+                    ClearAttributes();
+                    termAtt.SetEmpty().Append(tokens[index++]);
+                    return true;
+                }
+            }
+        }
+
+        /// <summary>
+        /// Test adding two fields with the same name, but
+        /// with different term vector setting (LUCENE-766).
+        /// </summary>
+        [Test]
+        public virtual void TestMixedTermVectorSettingsSameField()
+        {
+            Document doc = new Document();
+            // f1 first without tv then with tv
+            doc.Add(NewStringField("f1", "v1", Field.Store.YES));
+            FieldType customType2 = new FieldType(StringField.TYPE_STORED);
+            customType2.StoreTermVectors = true;
+            customType2.StoreTermVectorOffsets = true;
+            customType2.StoreTermVectorPositions = true;
+            doc.Add(NewField("f1", "v2", customType2));
+            // f2 first with tv then without tv
+            doc.Add(NewField("f2", "v1", customType2));
+            doc.Add(NewStringField("f2", "v2", Field.Store.YES));
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(doc);
+            writer.Dispose();
+
+            TestUtil.CheckIndex(Dir);
+
+            IndexReader reader = DirectoryReader.Open(Dir);
+            // f1
+            Terms tfv1 = reader.GetTermVectors(0).GetTerms("f1");
+            Assert.IsNotNull(tfv1);
+            Assert.AreEqual(2, tfv1.Count, "the 'with_tv' setting should rule!");
+            // f2
+            Terms tfv2 = reader.GetTermVectors(0).GetTerms("f2");
+            Assert.IsNotNull(tfv2);
+            Assert.AreEqual(2, tfv2.Count, "the 'with_tv' setting should rule!");
+            reader.Dispose();
+        }
+
+        /// <summary>
+        /// Test adding two fields with the same name, one indexed
+        /// the other stored only. The omitNorms and omitTermFreqAndPositions setting
+        /// of the stored field should not affect the indexed one (LUCENE-1590)
+        /// </summary>
+        [Test]
+        public virtual void TestLUCENE_1590()
+        {
+            Document doc = new Document();
+            // f1 has no norms
+            FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
+            customType.OmitNorms = true;
+            FieldType customType2 = new FieldType();
+            customType2.IsStored = true;
+            doc.Add(NewField("f1", "v1", customType));
+            doc.Add(NewField("f1", "v2", customType2));
+            // f2 has no TF
+            FieldType customType3 = new FieldType(TextField.TYPE_NOT_STORED);
+            customType3.IndexOptions = IndexOptions.DOCS_ONLY;
+            Field f = NewField("f2", "v1", customType3);
+            doc.Add(f);
+            doc.Add(NewField("f2", "v2", customType2));
+
+            IndexWriter writer = new IndexWriter(Dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
+            writer.AddDocument(doc);
+            writer.ForceMerge(1); // be sure to have a single segment
+            writer.Dispose();
+
+            TestUtil.CheckIndex(Dir);
+
+            SegmentReader reader = GetOnlySegmentReader(DirectoryReader.Open(Dir));
+            FieldInfos fi = reader.FieldInfos;
+            // f1
+            Assert.IsFalse(fi.FieldInfo("f1").HasNorms, "f1 should have no norms");
+            Assert.AreEqual(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.FieldInfo("f1").IndexOptions, "omitTermFreqAndPositions field bit should not be set for f1");
+            // f2
+            Assert.IsTrue(fi.FieldInfo("f2").HasNorms, "f2 should have norms");
+            Assert.AreEqual(IndexOptions.DOCS_ONLY, fi.FieldInfo("f2").IndexOptions, "omitTermFreqAndPositions field bit should be set for f2");
+            reader.Dispose();
+        }
+    }
+}
\ No newline at end of file


Mime
View raw message