lucenenet-commits mailing list archives

From nightowl...@apache.org
Subject [37/62] [abbrv] [partial] lucenenet git commit: Renamed Lucene.Net.Core folder Lucene.Net because the dotnet.exe pack command doesn't allow creating a NuGet package with a different name than its folder. Working around it with the script was much more co
Date Tue, 04 Apr 2017 17:19:43 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs
deleted file mode 100644
index 29516db..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40PostingsReader.cs
+++ /dev/null
@@ -1,1315 +0,0 @@
-using Lucene.Net.Index;
-using Lucene.Net.Support;
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using IBits = Lucene.Net.Util.IBits;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using DataInput = Lucene.Net.Store.DataInput;
-    using Directory = Lucene.Net.Store.Directory;
-    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
-    using DocsEnum = Lucene.Net.Index.DocsEnum;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-    using TermState = Lucene.Net.Index.TermState;
-
-    /// <summary>
-    /// Concrete class that reads the 4.0 frq/prox
-    /// postings format.
-    /// </summary>
-    ///  <seealso cref="Lucene40PostingsFormat"/>
-    ///  @deprecated Only for reading old 4.0 segments
-    [Obsolete("Only for reading old 4.0 segments")]
-    public class Lucene40PostingsReader : PostingsReaderBase
-    {
-        internal static readonly string TERMS_CODEC = "Lucene40PostingsWriterTerms";
-        internal static readonly string FRQ_CODEC = "Lucene40PostingsWriterFrq";
-        internal static readonly string PRX_CODEC = "Lucene40PostingsWriterPrx";
-
-        //private static boolean DEBUG = BlockTreeTermsWriter.DEBUG;
-
-        // Increment version to change it:
-        internal static readonly int VERSION_START = 0;
-
-        internal static readonly int VERSION_LONG_SKIP = 1;
-        internal static readonly int VERSION_CURRENT = VERSION_LONG_SKIP;
-
-        private readonly IndexInput freqIn;
-        private readonly IndexInput proxIn;
-        // public static boolean DEBUG = BlockTreeTermsWriter.DEBUG;
-
-        internal int skipInterval;
-        internal int maxSkipLevels;
-        internal int skipMinimum;
-
-        // private String segment;
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, string segmentSuffix)
-        {
-            bool success = false;
-            IndexInput freqIn = null;
-            IndexInput proxIn = null;
-            try
-            {
-                freqIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION), ioContext);
-                CodecUtil.CheckHeader(freqIn, FRQ_CODEC, VERSION_START, VERSION_CURRENT);
-                // TODO: hasProx should (somehow!) become codec private,
-                // but it's tricky because 1) FIS.hasProx is global (it
-                // could be that all fields that have prox are written by a
-                // different codec), 2) the field may have had prox in
-                // the past but all docs w/ that field were deleted.
-                // Really we'd need to init prxOut lazily on write, and
-                // then somewhere record that we actually wrote it so we
-                // know whether to open on read:
-                if (fieldInfos.HasProx)
-                {
-                    proxIn = dir.OpenInput(IndexFileNames.SegmentFileName(segmentInfo.Name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION), ioContext);
-                    CodecUtil.CheckHeader(proxIn, PRX_CODEC, VERSION_START, VERSION_CURRENT);
-                }
-                else
-                {
-                    proxIn = null;
-                }
-                this.freqIn = freqIn;
-                this.proxIn = proxIn;
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(freqIn, proxIn);
-                }
-            }
-        }
-
-        public override void Init(IndexInput termsIn)
-        {
-            // Make sure we are talking to the matching past writer
-            CodecUtil.CheckHeader(termsIn, TERMS_CODEC, VERSION_START, VERSION_CURRENT);
-
-            skipInterval = termsIn.ReadInt32();
-            maxSkipLevels = termsIn.ReadInt32();
-            skipMinimum = termsIn.ReadInt32();
-        }
-
-        // Must keep sealed because we do non-standard clone
-        private sealed class StandardTermState : BlockTermState
-        {
-            internal long freqOffset;
-            internal long proxOffset;
-            internal long skipOffset;
-
-            public override object Clone()
-            {
-                StandardTermState other = new StandardTermState();
-                other.CopyFrom(this);
-                return other;
-            }
-
-            public override void CopyFrom(TermState other)
-            {
-                base.CopyFrom(other);
-                StandardTermState other2 = (StandardTermState)other;
-                freqOffset = other2.freqOffset;
-                proxOffset = other2.proxOffset;
-                skipOffset = other2.skipOffset;
-            }
-
-            public override string ToString()
-            {
-                return base.ToString() + " freqFP=" + freqOffset + " proxFP=" + proxOffset + " skipOffset=" + skipOffset;
-            }
-        }
-
-        public override BlockTermState NewTermState()
-        {
-            return new StandardTermState();
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                try
-                {
-                    if (freqIn != null)
-                    {
-                        freqIn.Dispose();
-                    }
-                }
-                finally
-                {
-                    if (proxIn != null)
-                    {
-                        proxIn.Dispose();
-                    }
-                }
-            }
-        }
-
-        public override void DecodeTerm(long[] longs, DataInput @in, FieldInfo fieldInfo, BlockTermState termState, bool absolute)
-        {
-            StandardTermState termState2 = (StandardTermState)termState;
-            // if (DEBUG) System.out.println("SPR: nextTerm seg=" + segment + " tbOrd=" + termState2.termBlockOrd + " bytesReader.fp=" + termState.bytesReader.getPosition());
-            bool isFirstTerm = termState2.TermBlockOrd == 0;
-            if (absolute)
-            {
-                termState2.freqOffset = 0;
-                termState2.proxOffset = 0;
-            }
-
-            termState2.freqOffset += @in.ReadVInt64();
-            /*
-            if (DEBUG) {
-              System.out.println("  dF=" + termState2.docFreq);
-              System.out.println("  freqFP=" + termState2.freqOffset);
-            }
-            */
-            Debug.Assert(termState2.freqOffset < freqIn.Length);
-
-            if (termState2.DocFreq >= skipMinimum)
-            {
-                termState2.skipOffset = @in.ReadVInt64();
-                // if (DEBUG) System.out.println("  skipOffset=" + termState2.skipOffset + " vs freqIn.length=" + freqIn.length());
-                Debug.Assert(termState2.freqOffset + termState2.skipOffset < freqIn.Length);
-            }
-            else
-            {
-                // undefined
-            }
-
-            if (fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0)
-            {
-                termState2.proxOffset += @in.ReadVInt64();
-                // if (DEBUG) System.out.println("  proxFP=" + termState2.proxOffset);
-            }
-        }
-
-        public override DocsEnum Docs(FieldInfo fieldInfo, BlockTermState termState, IBits liveDocs, DocsEnum reuse, DocsFlags flags)
-        {
-            if (CanReuse(reuse, liveDocs))
-            {
-                // if (DEBUG) System.out.println("SPR.docs ts=" + termState2);
-                return ((SegmentDocsEnumBase)reuse).Reset(fieldInfo, (StandardTermState)termState);
-            }
-            return NewDocsEnum(liveDocs, fieldInfo, (StandardTermState)termState);
-        }
-
-        private bool CanReuse(DocsEnum reuse, IBits liveDocs)
-        {
-            if (reuse != null && (reuse is SegmentDocsEnumBase))
-            {
-                SegmentDocsEnumBase docsEnum = (SegmentDocsEnumBase)reuse;
-                // If you are using ParallelReader, and pass in a
-                // reused DocsEnum, it could have come from another
-                // reader also using standard codec
-                if (docsEnum.startFreqIn == freqIn)
-                {
-                    // we only reuse if the incoming enum has the same liveDocs as the given liveDocs
-                    return liveDocs == docsEnum.m_liveDocs;
-                }
-            }
-            return false;
-        }
-
-        private DocsEnum NewDocsEnum(IBits liveDocs, FieldInfo fieldInfo, StandardTermState termState)
-        {
-            if (liveDocs == null)
-            {
-                return (new AllDocsSegmentDocsEnum(this, freqIn)).Reset(fieldInfo, termState);
-            }
-            else
-            {
-                return (new LiveDocsSegmentDocsEnum(this, freqIn, liveDocs)).Reset(fieldInfo, termState);
-            }
-        }
-
-        public override DocsAndPositionsEnum DocsAndPositions(FieldInfo fieldInfo, BlockTermState termState, IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags)
-        {
-            bool hasOffsets = fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
-
-            // TODO: can we optimize if FLAG_PAYLOADS / FLAG_OFFSETS
-            // isn't passed?
-
-            // TODO: refactor
-            if (fieldInfo.HasPayloads || hasOffsets)
-            {
-                SegmentFullPositionsEnum docsEnum;
-                if (reuse == null || !(reuse is SegmentFullPositionsEnum))
-                {
-                    docsEnum = new SegmentFullPositionsEnum(this, freqIn, proxIn);
-                }
-                else
-                {
-                    docsEnum = (SegmentFullPositionsEnum)reuse;
-                    if (docsEnum.startFreqIn != freqIn)
-                    {
-                        // If you are using ParallelReader, and pass in a
-                        // reused DocsEnum, it could have come from another
-                        // reader also using standard codec
-                        docsEnum = new SegmentFullPositionsEnum(this, freqIn, proxIn);
-                    }
-                }
-                return docsEnum.Reset(fieldInfo, (StandardTermState)termState, liveDocs);
-            }
-            else
-            {
-                SegmentDocsAndPositionsEnum docsEnum;
-                if (reuse == null || !(reuse is SegmentDocsAndPositionsEnum))
-                {
-                    docsEnum = new SegmentDocsAndPositionsEnum(this, freqIn, proxIn);
-                }
-                else
-                {
-                    docsEnum = (SegmentDocsAndPositionsEnum)reuse;
-                    if (docsEnum.startFreqIn != freqIn)
-                    {
-                        // If you are using ParallelReader, and pass in a
-                        // reused DocsEnum, it could have come from another
-                        // reader also using standard codec
-                        docsEnum = new SegmentDocsAndPositionsEnum(this, freqIn, proxIn);
-                    }
-                }
-                return docsEnum.Reset(fieldInfo, (StandardTermState)termState, liveDocs);
-            }
-        }
-
-        internal static readonly int BUFFERSIZE = 64;
-
-        private abstract class SegmentDocsEnumBase : DocsEnum
-        {
-            private readonly Lucene40PostingsReader outerInstance;
-
-            protected readonly int[] m_docs = new int[BUFFERSIZE];
-            protected readonly int[] m_freqs = new int[BUFFERSIZE];
-
-            internal readonly IndexInput freqIn; // reuse
-            internal readonly IndexInput startFreqIn; // reuse
-            internal Lucene40SkipListReader skipper; // reuse - lazy loaded
-
-            protected bool m_indexOmitsTF; // does current field omit term freq?
-            protected bool m_storePayloads; // does current field store payloads?
-            protected bool m_storeOffsets; // does current field store offsets?
-
-            protected int m_limit; // number of docs in this posting
-            protected int m_ord; // how many docs we've read
-            protected int m_doc; // doc we last read
-            protected int m_accum; // accumulator for doc deltas
-            protected int m_freq; // freq we last read
-            protected int m_maxBufferedDocId;
-
-            protected int m_start;
-            protected int m_count;
-
-            protected long m_freqOffset;
-            protected long m_skipOffset;
-
-            protected bool m_skipped;
-            protected internal readonly IBits m_liveDocs;
-
-            internal SegmentDocsEnumBase(Lucene40PostingsReader outerInstance, IndexInput startFreqIn, IBits liveDocs)
-            {
-                this.outerInstance = outerInstance;
-                this.startFreqIn = startFreqIn;
-                this.freqIn = (IndexInput)startFreqIn.Clone();
-                this.m_liveDocs = liveDocs;
-            }
-
-            internal virtual DocsEnum Reset(FieldInfo fieldInfo, StandardTermState termState)
-            {
-                m_indexOmitsTF = fieldInfo.IndexOptions == IndexOptions.DOCS_ONLY;
-                m_storePayloads = fieldInfo.HasPayloads;
-                m_storeOffsets = fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
-                m_freqOffset = termState.freqOffset;
-                m_skipOffset = termState.skipOffset;
-
-                // TODO: for full enum case (eg segment merging) this
-                // seek is unnecessary; maybe we can avoid in such
-                // cases
-                freqIn.Seek(termState.freqOffset);
-                m_limit = termState.DocFreq;
-                Debug.Assert(m_limit > 0);
-                m_ord = 0;
-                m_doc = -1;
-                m_accum = 0;
-                // if (DEBUG) System.out.println("  sde limit=" + limit + " freqFP=" + freqOffset);
-                m_skipped = false;
-
-                m_start = -1;
-                m_count = 0;
-                m_freq = 1;
-                if (m_indexOmitsTF)
-                {
-                    Arrays.Fill(m_freqs, 1);
-                }
-                m_maxBufferedDocId = -1;
-                return this;
-            }
-
-            public override sealed int Freq
-            {
-                get { return m_freq; }
-            }
-
-            public override sealed int DocID
-            {
-                get { return m_doc; }
-            }
-
-            public override sealed int Advance(int target)
-            {
-                // last doc in our buffer is >= target, binary search + next()
-                if (++m_start < m_count && m_maxBufferedDocId >= target)
-                {
-                    if ((m_count - m_start) > 32) // 32 seemed to be a sweet spot here, so use binary search if many results are pending
-                    {
-                        m_start = BinarySearch(m_count - 1, m_start, target, m_docs);
-                        return NextDoc();
-                    }
-                    else
-                    {
-                        return LinearScan(target);
-                    }
-                }
-
-                m_start = m_count; // buffer is consumed
-
-                return m_doc = SkipTo(target);
-            }
-
-            private int BinarySearch(int hi, int low, int target, int[] docs)
-            {
-                while (low <= hi)
-                {
-                    int mid = (int)((uint)(hi + low) >> 1);
-                    int doc = docs[mid];
-                    if (doc < target)
-                    {
-                        low = mid + 1;
-                    }
-                    else if (doc > target)
-                    {
-                        hi = mid - 1;
-                    }
-                    else
-                    {
-                        low = mid;
-                        break;
-                    }
-                }
-                return low - 1;
-            }
-
-            internal int ReadFreq(IndexInput freqIn, int code)
-            {
-                if ((code & 1) != 0) // if low bit is set
-                {
-                    return 1; // freq is one
-                }
-                else
-                {
-                    return freqIn.ReadVInt32(); // else read freq
-                }
-            }
-
-            protected internal abstract int LinearScan(int scanTo);
-
-            protected internal abstract int ScanTo(int target);
-
-            protected internal int Refill()
-            {
-                int doc = NextUnreadDoc();
-                m_count = 0;
-                m_start = -1;
-                if (doc == NO_MORE_DOCS)
-                {
-                    return NO_MORE_DOCS;
-                }
-                int numDocs = Math.Min(m_docs.Length, m_limit - m_ord);
-                m_ord += numDocs;
-                if (m_indexOmitsTF)
-                {
-                    m_count = FillDocs(numDocs);
-                }
-                else
-                {
-                    m_count = FillDocsAndFreqs(numDocs);
-                }
-                m_maxBufferedDocId = m_count > 0 ? m_docs[m_count - 1] : NO_MORE_DOCS;
-                return doc;
-            }
-
-            protected internal abstract int NextUnreadDoc();
-
-            private int FillDocs(int size)
-            {
-                IndexInput freqIn = this.freqIn;
-                int[] docs = this.m_docs;
-                int docAc = m_accum;
-                for (int i = 0; i < size; i++)
-                {
-                    docAc += freqIn.ReadVInt32();
-                    docs[i] = docAc;
-                }
-                m_accum = docAc;
-                return size;
-            }
-
-            private int FillDocsAndFreqs(int size)
-            {
-                IndexInput freqIn = this.freqIn;
-                int[] docs = this.m_docs;
-                int[] freqs = this.m_freqs;
-                int docAc = m_accum;
-                for (int i = 0; i < size; i++)
-                {
-                    int code = freqIn.ReadVInt32();
-                    docAc += (int)((uint)code >> 1); // shift off low bit
-                    freqs[i] = ReadFreq(freqIn, code);
-                    docs[i] = docAc;
-                }
-                m_accum = docAc;
-                return size;
-            }
-
-            private int SkipTo(int target)
-            {
-                if ((target - outerInstance.skipInterval) >= m_accum && m_limit >= outerInstance.skipMinimum)
-                {
-                    // There are enough docs in the posting to have
-                    // skip data, and it isn't too close.
-
-                    if (skipper == null)
-                    {
-                        // this is the first time this enum has ever been used for skipping -- do lazy init
-                        skipper = new Lucene40SkipListReader((IndexInput)freqIn.Clone(), outerInstance.maxSkipLevels, outerInstance.skipInterval);
-                    }
-
-                    if (!m_skipped)
-                    {
-                        // this is the first time this posting has
-                        // skipped since reset() was called, so now we
-                        // load the skip data for this posting
-
-                        skipper.Init(m_freqOffset + m_skipOffset, m_freqOffset, 0, m_limit, m_storePayloads, m_storeOffsets);
-
-                        m_skipped = true;
-                    }
-
-                    int newOrd = skipper.SkipTo(target);
-
-                    if (newOrd > m_ord)
-                    {
-                        // Skipper moved
-
-                        m_ord = newOrd;
-                        m_accum = skipper.Doc;
-                        freqIn.Seek(skipper.FreqPointer);
-                    }
-                }
-                return ScanTo(target);
-            }
-
-            public override long GetCost()
-            {
-                return m_limit;
-            }
-        }
-
-        private sealed class AllDocsSegmentDocsEnum : SegmentDocsEnumBase
-        {
-            private readonly Lucene40PostingsReader outerInstance;
-
-            internal AllDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInput startFreqIn)
-                : base(outerInstance, startFreqIn, null)
-            {
-                this.outerInstance = outerInstance;
-                Debug.Assert(m_liveDocs == null);
-            }
-
-            public override int NextDoc()
-            {
-                if (++m_start < m_count)
-                {
-                    m_freq = m_freqs[m_start];
-                    return m_doc = m_docs[m_start];
-                }
-                return m_doc = Refill();
-            }
-
-            protected internal override int LinearScan(int scanTo)
-            {
-                int[] docs = this.m_docs;
-                int upTo = m_count;
-                for (int i = m_start; i < upTo; i++)
-                {
-                    int d = docs[i];
-                    if (scanTo <= d)
-                    {
-                        m_start = i;
-                        m_freq = m_freqs[i];
-                        return m_doc = docs[i];
-                    }
-                }
-                return m_doc = Refill();
-            }
-
-            protected internal override int ScanTo(int target)
-            {
-                int docAcc = m_accum;
-                int frq = 1;
-                IndexInput freqIn = this.freqIn;
-                bool omitTF = m_indexOmitsTF;
-                int loopLimit = m_limit;
-                for (int i = m_ord; i < loopLimit; i++)
-                {
-                    int code = freqIn.ReadVInt32();
-                    if (omitTF)
-                    {
-                        docAcc += code;
-                    }
-                    else
-                    {
-                        docAcc += (int)((uint)code >> 1); // shift off low bit
-                        frq = ReadFreq(freqIn, code);
-                    }
-                    if (docAcc >= target)
-                    {
-                        m_freq = frq;
-                        m_ord = i + 1;
-                        return m_accum = docAcc;
-                    }
-                }
-                m_ord = m_limit;
-                m_freq = frq;
-                m_accum = docAcc;
-                return NO_MORE_DOCS;
-            }
-
-            protected internal override int NextUnreadDoc()
-            {
-                if (m_ord++ < m_limit)
-                {
-                    int code = freqIn.ReadVInt32();
-                    if (m_indexOmitsTF)
-                    {
-                        m_accum += code;
-                    }
-                    else
-                    {
-                        m_accum += (int)((uint)code >> 1); // shift off low bit
-                        m_freq = ReadFreq(freqIn, code);
-                    }
-                    return m_accum;
-                }
-                else
-                {
-                    return NO_MORE_DOCS;
-                }
-            }
-        }
-
-        private sealed class LiveDocsSegmentDocsEnum : SegmentDocsEnumBase
-        {
-            private readonly Lucene40PostingsReader outerInstance;
-
-            internal LiveDocsSegmentDocsEnum(Lucene40PostingsReader outerInstance, IndexInput startFreqIn, IBits liveDocs)
-                : base(outerInstance, startFreqIn, liveDocs)
-            {
-                this.outerInstance = outerInstance;
-                Debug.Assert(liveDocs != null);
-            }
-
-            public override int NextDoc()
-            {
-                IBits liveDocs = this.m_liveDocs;
-                for (int i = m_start + 1; i < m_count; i++)
-                {
-                    int d = m_docs[i];
-                    if (liveDocs.Get(d))
-                    {
-                        m_start = i;
-                        m_freq = m_freqs[i];
-                        return m_doc = d;
-                    }
-                }
-                m_start = m_count;
-                return m_doc = Refill();
-            }
-
-            protected internal override int LinearScan(int scanTo)
-            {
-                int[] docs = this.m_docs;
-                int upTo = m_count;
-                IBits liveDocs = this.m_liveDocs;
-                for (int i = m_start; i < upTo; i++)
-                {
-                    int d = docs[i];
-                    if (scanTo <= d && liveDocs.Get(d))
-                    {
-                        m_start = i;
-                        m_freq = m_freqs[i];
-                        return m_doc = docs[i];
-                    }
-                }
-                return m_doc = Refill();
-            }
-
-            protected internal override int ScanTo(int target)
-            {
-                int docAcc = m_accum;
-                int frq = 1;
-                IndexInput freqIn = this.freqIn;
-                bool omitTF = m_indexOmitsTF;
-                int loopLimit = m_limit;
-                IBits liveDocs = this.m_liveDocs;
-                for (int i = m_ord; i < loopLimit; i++)
-                {
-                    int code = freqIn.ReadVInt32();
-                    if (omitTF)
-                    {
-                        docAcc += code;
-                    }
-                    else
-                    {
-                        docAcc += (int)((uint)code >> 1); // shift off low bit
-                        frq = ReadFreq(freqIn, code);
-                    }
-                    if (docAcc >= target && liveDocs.Get(docAcc))
-                    {
-                        m_freq = frq;
-                        m_ord = i + 1;
-                        return m_accum = docAcc;
-                    }
-                }
-                m_ord = m_limit;
-                m_freq = frq;
-                m_accum = docAcc;
-                return NO_MORE_DOCS;
-            }
-
-            protected internal override int NextUnreadDoc()
-            {
-                int docAcc = m_accum;
-                int frq = 1;
-                IndexInput freqIn = this.freqIn;
-                bool omitTF = m_indexOmitsTF;
-                int loopLimit = m_limit;
-                IBits liveDocs = this.m_liveDocs;
-                for (int i = m_ord; i < loopLimit; i++)
-                {
-                    int code = freqIn.ReadVInt32();
-                    if (omitTF)
-                    {
-                        docAcc += code;
-                    }
-                    else
-                    {
-                        docAcc += (int)((uint)code >> 1); // shift off low bit
-                        frq = ReadFreq(freqIn, code);
-                    }
-                    if (liveDocs.Get(docAcc))
-                    {
-                        m_freq = frq;
-                        m_ord = i + 1;
-                        return m_accum = docAcc;
-                    }
-                }
-                m_ord = m_limit;
-                m_freq = frq;
-                m_accum = docAcc;
-                return NO_MORE_DOCS;
-            }
-        }
-
-        // TODO specialize DocsAndPosEnum too
-
-        // Decodes docs & positions. Neither payloads nor offsets are present.
-        private sealed class SegmentDocsAndPositionsEnum : DocsAndPositionsEnum
-        {
-            private readonly Lucene40PostingsReader outerInstance;
-
-            internal readonly IndexInput startFreqIn;
-            internal readonly IndexInput freqIn;
-            internal readonly IndexInput proxIn;
-            internal int limit; // number of docs in this posting
-            internal int ord; // how many docs we've read
-            internal int doc = -1; // doc we last read
-            internal int accum; // accumulator for doc deltas
-            internal int freq; // freq we last read
-            internal int position;
-
-            internal IBits liveDocs;
-
-            internal long freqOffset;
-            internal long skipOffset;
-            internal long proxOffset;
-
-            internal int posPendingCount;
-
-            internal bool skipped;
-            internal Lucene40SkipListReader skipper;
-            internal long lazyProxPointer;
-
-            public SegmentDocsAndPositionsEnum(Lucene40PostingsReader outerInstance, IndexInput freqIn, IndexInput proxIn)
-            {
-                this.outerInstance = outerInstance;
-                startFreqIn = freqIn;
-                this.freqIn = (IndexInput)freqIn.Clone();
-                this.proxIn = (IndexInput)proxIn.Clone();
-            }
-
-            public SegmentDocsAndPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState termState, IBits liveDocs)
-            {
-                Debug.Assert(fieldInfo.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-                Debug.Assert(!fieldInfo.HasPayloads);
-
-                this.liveDocs = liveDocs;
-
-                // TODO: for full enum case (eg segment merging) this
-                // seek is unnecessary; maybe we can avoid in such
-                // cases
-                freqIn.Seek(termState.freqOffset);
-                lazyProxPointer = termState.proxOffset;
-
-                limit = termState.DocFreq;
-                Debug.Assert(limit > 0);
-
-                ord = 0;
-                doc = -1;
-                accum = 0;
-                position = 0;
-
-                skipped = false;
-                posPendingCount = 0;
-
-                freqOffset = termState.freqOffset;
-                proxOffset = termState.proxOffset;
-                skipOffset = termState.skipOffset;
-                // if (DEBUG) System.out.println("StandardR.D&PE reset seg=" + segment + " limit=" + limit + " freqFP=" + freqOffset + " proxFP=" + proxOffset);
-
-                return this;
-            }
-
-            public override int NextDoc()
-            {
-                // if (DEBUG) System.out.println("SPR.nextDoc seg=" + segment + " freqIn.fp=" + freqIn.getFilePointer());
-                while (true)
-                {
-                    if (ord == limit)
-                    {
-                        // if (DEBUG) System.out.println("  return END");
-                        return doc = NO_MORE_DOCS;
-                    }
-
-                    ord++;
-
-                    // Decode next doc/freq pair
-                    int code = freqIn.ReadVInt32();
-
-                    accum += (int)((uint)code >> 1); // shift off low bit
-                    if ((code & 1) != 0) // if low bit is set
-                    {
-                        freq = 1; // freq is one
-                    }
-                    else
-                    {
-                        freq = freqIn.ReadVInt32(); // else read freq
-                    }
-                    posPendingCount += freq;
-
-                    if (liveDocs == null || liveDocs.Get(accum))
-                    {
-                        break;
-                    }
-                }
-
-                position = 0;
-
-                // if (DEBUG) System.out.println("  return doc=" + doc);
-                return (doc = accum);
-            }
-
-            public override int DocID
-            {
-                get { return doc; }
-            }
-
-            public override int Freq
-            {
-                get { return freq; }
-            }
-
-            public override int Advance(int target)
-            {
-                //System.out.println("StandardR.D&PE advance target=" + target);
-
-                if ((target - outerInstance.skipInterval) >= doc && limit >= outerInstance.skipMinimum)
-                {
-                    // There are enough docs in the posting to have
-                    // skip data, and it isn't too close
-
-                    if (skipper == null)
-                    {
-                        // this is the first time this enum has ever been used for skipping -- do lazy init
-                        skipper = new Lucene40SkipListReader((IndexInput)freqIn.Clone(), outerInstance.maxSkipLevels, outerInstance.skipInterval);
-                    }
-
-                    if (!skipped)
-                    {
-                        // this is the first time this posting has
-                        // skipped, since reset() was called, so now we
-                        // load the skip data for this posting
-
-                        skipper.Init(freqOffset + skipOffset, freqOffset, proxOffset, limit, false, false);
-
-                        skipped = true;
-                    }
-
-                    int newOrd = skipper.SkipTo(target);
-
-                    if (newOrd > ord)
-                    {
-                        // Skipper moved
-                        ord = newOrd;
-                        doc = accum = skipper.Doc;
-                        freqIn.Seek(skipper.FreqPointer);
-                        lazyProxPointer = skipper.ProxPointer;
-                        posPendingCount = 0;
-                        position = 0;
-                    }
-                }
-
-                // Now, linear scan for the rest:
-                do
-                {
-                    NextDoc();
-                } while (target > doc);
-
-                return doc;
-            }
-
-            public override int NextPosition()
-            {
-                if (lazyProxPointer != -1)
-                {
-                    proxIn.Seek(lazyProxPointer);
-                    lazyProxPointer = -1;
-                }
-
-                // scan over any docs that were iterated without their positions
-                if (posPendingCount > freq)
-                {
-                    position = 0;
-                    while (posPendingCount != freq)
-                    {
-                        if ((proxIn.ReadByte() & 0x80) == 0)
-                        {
-                            posPendingCount--;
-                        }
-                    }
-                }
-
-                position += proxIn.ReadVInt32();
-
-                posPendingCount--;
-
-                Debug.Assert(posPendingCount >= 0, "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount);
-
-                return position;
-            }
-
-            public override int StartOffset
-            {
-                get { return -1; }
-            }
-
-            public override int EndOffset
-            {
-                get { return -1; }
-            }
-
-            /// <summary>
-            /// Returns the payload at this position, or null if no
-            ///  payload was indexed.
-            /// </summary>
-            public override BytesRef GetPayload()
-            {
-                return null;
-            }
-
-            public override long GetCost()
-            {
-                return limit;
-            }
-        }
-
-        // Decodes docs & positions & (payloads and/or offsets)
-        private class SegmentFullPositionsEnum : DocsAndPositionsEnum
-        {
-            private readonly Lucene40PostingsReader outerInstance;
-
-            internal readonly IndexInput startFreqIn;
-            private readonly IndexInput freqIn;
-            private readonly IndexInput proxIn;
-
-            internal int limit; // number of docs in this posting
-            internal int ord; // how many docs we've read
-            internal int doc = -1; // doc we last read
-            internal int accum; // accumulator for doc deltas
-            internal int freq; // freq we last read
-            internal int position;
-
-            internal IBits liveDocs;
-
-            internal long freqOffset;
-            internal long skipOffset;
-            internal long proxOffset;
-
-            internal int posPendingCount;
-            internal int payloadLength;
-            internal bool payloadPending;
-
-            internal bool skipped;
-            internal Lucene40SkipListReader skipper;
-            internal BytesRef payload;
-            internal long lazyProxPointer;
-
-            internal bool storePayloads;
-            internal bool storeOffsets;
-
-            internal int offsetLength;
-            internal int startOffset;
-
-            public SegmentFullPositionsEnum(Lucene40PostingsReader outerInstance, IndexInput freqIn, IndexInput proxIn)
-            {
-                this.outerInstance = outerInstance;
-                startFreqIn = freqIn;
-                this.freqIn = (IndexInput)freqIn.Clone();
-                this.proxIn = (IndexInput)proxIn.Clone();
-            }
-
-            public virtual SegmentFullPositionsEnum Reset(FieldInfo fieldInfo, StandardTermState termState, IBits liveDocs)
-            {
-                storeOffsets = fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
-                storePayloads = fieldInfo.HasPayloads;
-                Debug.Assert(fieldInfo.IndexOptions.CompareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0);
-                Debug.Assert(storePayloads || storeOffsets);
-                if (payload == null)
-                {
-                    payload = new BytesRef();
-                    payload.Bytes = new byte[1];
-                }
-
-                this.liveDocs = liveDocs;
-
-                // TODO: for full enum case (eg segment merging) this
-                // seek is unnecessary; maybe we can avoid in such
-                // cases
-                freqIn.Seek(termState.freqOffset);
-                lazyProxPointer = termState.proxOffset;
-
-                limit = termState.DocFreq;
-                ord = 0;
-                doc = -1;
-                accum = 0;
-                position = 0;
-                startOffset = 0;
-
-                skipped = false;
-                posPendingCount = 0;
-                payloadPending = false;
-
-                freqOffset = termState.freqOffset;
-                proxOffset = termState.proxOffset;
-                skipOffset = termState.skipOffset;
-                //System.out.println("StandardR.D&PE reset seg=" + segment + " limit=" + limit + " freqFP=" + freqOffset + " proxFP=" + proxOffset + " this=" + this);
-
-                return this;
-            }
-
-            public override int NextDoc()
-            {
-                while (true)
-                {
-                    if (ord == limit)
-                    {
-                        //System.out.println("StandardR.D&PE seg=" + segment + " nextDoc return doc=END");
-                        return doc = NO_MORE_DOCS;
-                    }
-
-                    ord++;
-
-                    // Decode next doc/freq pair
-                    int code = freqIn.ReadVInt32();
-
-                    accum += (int)((uint)code >> 1); // shift off low bit
-                    if ((code & 1) != 0) // if low bit is set
-                    {
-                        freq = 1; // freq is one
-                    }
-                    else
-                    {
-                        freq = freqIn.ReadVInt32(); // else read freq
-                    }
-                    posPendingCount += freq;
-
-                    if (liveDocs == null || liveDocs.Get(accum))
-                    {
-                        break;
-                    }
-                }
-
-                position = 0;
-                startOffset = 0;
-
-                //System.out.println("StandardR.D&PE nextDoc seg=" + segment + " return doc=" + doc);
-                return (doc = accum);
-            }
-
-            public override int DocID
-            {
-                get { return doc; }
-            }
-
-            public override int Freq
-            {
-                get { return freq; }
-            }
-
-            public override int Advance(int target)
-            {
-                //System.out.println("StandardR.D&PE advance seg=" + segment + " target=" + target + " this=" + this);
-
-                if ((target - outerInstance.skipInterval) >= doc && limit >= outerInstance.skipMinimum)
-                {
-                    // There are enough docs in the posting to have
-                    // skip data, and it isn't too close
-
-                    if (skipper == null)
-                    {
-                        // this is the first time this enum has ever been used for skipping -- do lazy init
-                        skipper = new Lucene40SkipListReader((IndexInput)freqIn.Clone(), outerInstance.maxSkipLevels, outerInstance.skipInterval);
-                    }
-
-                    if (!skipped)
-                    {
-                        // this is the first time this posting has
-                        // skipped, since reset() was called, so now we
-                        // load the skip data for this posting
-                        //System.out.println("  init skipper freqOffset=" + freqOffset + " skipOffset=" + skipOffset + " vs len=" + freqIn.length());
-                        skipper.Init(freqOffset + skipOffset, freqOffset, proxOffset, limit, storePayloads, storeOffsets);
-
-                        skipped = true;
-                    }
-
-                    int newOrd = skipper.SkipTo(target);
-
-                    if (newOrd > ord)
-                    {
-                        // Skipper moved
-                        ord = newOrd;
-                        doc = accum = skipper.Doc;
-                        freqIn.Seek(skipper.FreqPointer);
-                        lazyProxPointer = skipper.ProxPointer;
-                        posPendingCount = 0;
-                        position = 0;
-                        startOffset = 0;
-                        payloadPending = false;
-                        payloadLength = skipper.PayloadLength;
-                        offsetLength = skipper.OffsetLength;
-                    }
-                }
-
-                // Now, linear scan for the rest:
-                do
-                {
-                    NextDoc();
-                } while (target > doc);
-
-                return doc;
-            }
-
-            public override int NextPosition()
-            {
-                if (lazyProxPointer != -1)
-                {
-                    proxIn.Seek(lazyProxPointer);
-                    lazyProxPointer = -1;
-                }
-
-                if (payloadPending && payloadLength > 0)
-                {
-                    // payload of last position was never retrieved -- skip it
-                    proxIn.Seek(proxIn.GetFilePointer() + payloadLength);
-                    payloadPending = false;
-                }
-
-                // scan over any docs that were iterated without their positions
-                while (posPendingCount > freq)
-                {
-                    int code = proxIn.ReadVInt32();
-
-                    if (storePayloads)
-                    {
-                        if ((code & 1) != 0)
-                        {
-                            // new payload length
-                            payloadLength = proxIn.ReadVInt32();
-                            Debug.Assert(payloadLength >= 0);
-                        }
-                        Debug.Assert(payloadLength != -1);
-                    }
-
-                    if (storeOffsets)
-                    {
-                        if ((proxIn.ReadVInt32() & 1) != 0)
-                        {
-                            // new offset length
-                            offsetLength = proxIn.ReadVInt32();
-                        }
-                    }
-
-                    if (storePayloads)
-                    {
-                        proxIn.Seek(proxIn.GetFilePointer() + payloadLength);
-                    }
-
-                    posPendingCount--;
-                    position = 0;
-                    startOffset = 0;
-                    payloadPending = false;
-                    //System.out.println("StandardR.D&PE skipPos");
-                }
-
-                // read next position
-                if (payloadPending && payloadLength > 0)
-                {
-                    // payload wasn't retrieved for last position
-                    proxIn.Seek(proxIn.GetFilePointer() + payloadLength);
-                }
-
-                int code_ = proxIn.ReadVInt32();
-                if (storePayloads)
-                {
-                    if ((code_ & 1) != 0)
-                    {
-                        // new payload length
-                        payloadLength = proxIn.ReadVInt32();
-                        Debug.Assert(payloadLength >= 0);
-                    }
-                    Debug.Assert(payloadLength != -1);
-
-                    payloadPending = true;
-                    code_ = (int)((uint)code_ >> 1);
-                }
-                position += code_;
-
-                if (storeOffsets)
-                {
-                    int offsetCode = proxIn.ReadVInt32();
-                    if ((offsetCode & 1) != 0)
-                    {
-                        // new offset length
-                        offsetLength = proxIn.ReadVInt32();
-                    }
-                    startOffset += (int)((uint)offsetCode >> 1);
-                }
-
-                posPendingCount--;
-
-                Debug.Assert(posPendingCount >= 0, "nextPosition() was called too many times (more than freq() times) posPendingCount=" + posPendingCount);
-
-                //System.out.println("StandardR.D&PE nextPos   return pos=" + position);
-                return position;
-            }
-
-            public override int StartOffset
-            {
-                get { return storeOffsets ? startOffset : -1; }
-            }
-
-            public override int EndOffset
-            {
-                get { return storeOffsets ? startOffset + offsetLength : -1; }
-            }
-
-            /// <summary>
-            /// Returns the payload at this position, or null if no
-            ///  payload was indexed.
-            /// </summary>
-            public override BytesRef GetPayload()
-            {
-                if (storePayloads)
-                {
-                    if (payloadLength <= 0)
-                    {
-                        return null;
-                    }
-                    Debug.Assert(lazyProxPointer == -1);
-                    Debug.Assert(posPendingCount < freq);
-
-                    if (payloadPending)
-                    {
-                        if (payloadLength > payload.Bytes.Length)
-                        {
-                            payload.Grow(payloadLength);
-                        }
-
-                        proxIn.ReadBytes(payload.Bytes, 0, payloadLength);
-                        payload.Length = payloadLength;
-                        payloadPending = false;
-                    }
-
-                    return payload;
-                }
-                else
-                {
-                    return null;
-                }
-            }
-
-            public override long GetCost()
-            {
-                return limit;
-            }
-        }
-
-        public override long RamBytesUsed()
-        {
-            return 0;
-        }
-
-        public override void CheckIntegrity()
-        {
-        }
-    }
-}
\ No newline at end of file
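
A note for readers skimming the removed Lucene40PostingsReader above: its hot loop decodes the .frq stream as interleaved doc deltas and frequencies, where the low bit of each VInt code flags "freq is one" and the remaining bits carry the doc delta (see ReadFreq and FillDocsAndFreqs); when the field omits term frequencies entirely, the code is the raw doc delta. The following C# sketch is illustrative only and not part of this commit; the readVInt32 delegate stands in for the IndexInput.ReadVInt32() call used by the deleted reader:

    // Illustrative sketch (not from the commit): decode one (docDelta, freq) pair
    // the way the removed reader's ReadFreq/FillDocsAndFreqs code does.
    static (int DocDelta, int Freq) DecodeDocAndFreq(System.Func<int> readVInt32)
    {
        int code = readVInt32();
        int docDelta = (int)((uint)code >> 1); // shift off the low bit to get the doc delta
        int freq = (code & 1) != 0
            ? 1                 // low bit set: freq is one, nothing more was written
            : readVInt32();     // otherwise the frequency follows as its own VInt
        return (docDelta, freq);
    }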

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs
deleted file mode 100644
index c3ce3c9..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoFormat.cs
+++ /dev/null
@@ -1,108 +0,0 @@
-using System;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    // javadocs
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo; // javadocs
-
-    // javadocs
-    // javadocs
-
-    /// <summary>
-    /// Lucene 4.0 Segment info format.
-    /// <p>
-    /// Files:
-    /// <ul>
-    ///   <li><tt>.si</tt>: Header, SegVersion, SegSize, IsCompoundFile, Diagnostics, Attributes, Files
-    /// </ul>
-    /// </p>
-    /// Data types:
-    /// <p>
-    /// <ul>
-    ///   <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    ///   <li>SegSize --&gt; <seealso cref="DataOutput#writeInt Int32"/></li>
-    ///   <li>SegVersion --&gt; <seealso cref="DataOutput#writeString String"/></li>
-    ///   <li>Files --&gt; <seealso cref="DataOutput#writeStringSet Set&lt;String&gt;"/></li>
-    ///   <li>Diagnostics, Attributes --&gt; <seealso cref="DataOutput#writeStringStringMap Map&lt;String,String&gt;"/></li>
-    ///   <li>IsCompoundFile --&gt; <seealso cref="DataOutput#writeByte Int8"/></li>
-    /// </ul>
-    /// </p>
-    /// Field Descriptions:
-    /// <p>
-    /// <ul>
-    ///   <li>SegVersion is the code version that created the segment.</li>
-    ///   <li>SegSize is the number of documents contained in the segment index.</li>
-    ///   <li>IsCompoundFile records whether the segment is written as a compound file or
-    ///       not. If this is -1, the segment is not a compound file. If it is 1, the segment
-    ///       is a compound file.</li>
-    ///   <li>Checksum contains the CRC32 checksum of all bytes in the segments_N file up
-    ///       until the checksum. this is used to verify integrity of the file on opening the
-    ///       index.</li>
-    ///   <li>The Diagnostics Map is privately written by <seealso cref="IndexWriter"/>, as a debugging aid,
-    ///       for each segment it creates. It includes metadata like the current Lucene
-    ///       version, OS, Java version, why the segment was created (merge, flush,
-    ///       addIndexes), etc.</li>
-    ///   <li>Attributes: a key-value map of codec-private attributes.</li>
-    ///   <li>Files is a list of files referred to by this segment.</li>
-    /// </ul>
-    /// </p>
-    /// </summary>
-    /// <seealso cref="SegmentInfos"/>
-    /// @lucene.experimental
-    /// @deprecated Only for reading old 4.0-4.5 segments, and supporting IndexWriter.addIndexes
-    [Obsolete("Only for reading old 4.0-4.5 segments, and supporting IndexWriter.AddIndexes()")]
-    public class Lucene40SegmentInfoFormat : SegmentInfoFormat
-    {
-        private readonly SegmentInfoReader reader = new Lucene40SegmentInfoReader();
-        private readonly SegmentInfoWriter writer = new Lucene40SegmentInfoWriter();
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40SegmentInfoFormat()
-        {
-        }
-
-        public override SegmentInfoReader SegmentInfoReader
-        {
-            get
-            {
-                return reader;
-            }
-        }
-
-        // we must unfortunately support write, to allow addIndexes to write a new .si with rewritten filenames:
-        // see LUCENE-5377
-        public override SegmentInfoWriter SegmentInfoWriter
-        {
-            get
-            {
-                return writer;
-            }
-        }
-
-        /// <summary>
-        /// File extension used to store <seealso cref="SegmentInfo"/>. </summary>
-        public readonly static string SI_EXTENSION = "si";
-
-        internal readonly static string CODEC_NAME = "Lucene40SegmentInfo";
-        internal readonly static int VERSION_START = 0;
-        internal readonly static int VERSION_CURRENT = VERSION_START;
-    }
-}
\ No newline at end of file
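
The .si layout documented in the removed Lucene40SegmentInfoFormat above (Header, SegVersion, SegSize, IsCompoundFile, Diagnostics, Attributes, Files) is read back in that order by the Lucene40SegmentInfoReader deleted in the next hunk. A condensed C# sketch of that read sequence, illustrative only and not part of this commit:

    // Illustrative sketch (not from the commit): the field-by-field .si read order,
    // condensed from the removed Lucene40SegmentInfoReader.Read implementation.
    private static void ReadSiFields(Lucene.Net.Store.IndexInput input)
    {
        CodecUtil.CheckHeader(input, Lucene40SegmentInfoFormat.CODEC_NAME,
            Lucene40SegmentInfoFormat.VERSION_START, Lucene40SegmentInfoFormat.VERSION_CURRENT); // Header
        string version = input.ReadString();                                        // SegVersion
        int docCount = input.ReadInt32();                                           // SegSize
        bool isCompoundFile = input.ReadByte() == Lucene.Net.Index.SegmentInfo.YES; // IsCompoundFile
        var diagnostics = input.ReadStringStringMap();                              // Diagnostics
        input.ReadStringStringMap();                                                // Attributes (deprecated; read and discarded)
        var files = input.ReadStringSet();                                          // Files
        CodecUtil.CheckEOF(input);                                                  // no trailing bytes allowed
    }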

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoReader.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoReader.cs
deleted file mode 100644
index aec213d..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoReader.cs
+++ /dev/null
@@ -1,87 +0,0 @@
-using System;
-using System.Collections.Generic;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using Directory = Lucene.Net.Store.Directory;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-    /// <summary>
-    /// Lucene 4.0 implementation of <seealso cref="SegmentInfoReader"/>.
-    /// </summary>
-    /// <seealso cref= Lucene40SegmentInfoFormat
-    /// @lucene.experimental </seealso>
-    /// @deprecated Only for reading old 4.0-4.5 segments
-    [Obsolete("Only for reading old 4.0-4.5 segments")]
-    public class Lucene40SegmentInfoReader : SegmentInfoReader
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40SegmentInfoReader()
-        {
-        }
-
-        public override SegmentInfo Read(Directory dir, string segment, IOContext context)
-        {
-            string fileName = IndexFileNames.SegmentFileName(segment, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
-            IndexInput input = dir.OpenInput(fileName, context);
-            bool success = false;
-            try
-            {
-                CodecUtil.CheckHeader(input, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_START, Lucene40SegmentInfoFormat.VERSION_CURRENT);
-                string version = input.ReadString();
-                int docCount = input.ReadInt32();
-                if (docCount < 0)
-                {
-                    throw new CorruptIndexException("invalid docCount: " + docCount + " (resource=" + input + ")");
-                }
-                bool isCompoundFile = input.ReadByte() == SegmentInfo.YES;
-                IDictionary<string, string> diagnostics = input.ReadStringStringMap();
-                input.ReadStringStringMap(); // read deprecated attributes
-                ISet<string> files = input.ReadStringSet();
-
-                CodecUtil.CheckEOF(input);
-
-                SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics);
-                si.SetFiles(files);
-
-                success = true;
-
-                return si;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(input);
-                }
-                else
-                {
-                    input.Dispose();
-                }
-            }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs
deleted file mode 100644
index a2d2925..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SegmentInfoWriter.cs
+++ /dev/null
@@ -1,83 +0,0 @@
-using Lucene.Net.Support;
-using System;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-    /// <summary>
-    /// Lucene 4.0 implementation of <seealso cref="SegmentInfoWriter"/>.
-    /// </summary>
-    /// <seealso cref= Lucene40SegmentInfoFormat
-    /// @lucene.experimental </seealso>
-    [Obsolete]
-    public class Lucene40SegmentInfoWriter : SegmentInfoWriter
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40SegmentInfoWriter()
-        {
-        }
-
-        /// <summary>
-        /// Save a single segment's info. </summary>
-        public override void Write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext)
-        {
-            string fileName = IndexFileNames.SegmentFileName(si.Name, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
-            si.AddFile(fileName);
-
-            IndexOutput output = dir.CreateOutput(fileName, ioContext);
-
-            bool success = false;
-            try
-            {
-                CodecUtil.WriteHeader(output, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_CURRENT);
-                // Write the Lucene version that created this segment, since 3.1
-                output.WriteString(si.Version);
-                output.WriteInt32(si.DocCount);
-
-                output.WriteByte((byte)(sbyte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
-                output.WriteStringStringMap(si.Diagnostics);
-                output.WriteStringStringMap(Collections.EmptyMap<string, string>());
-                output.WriteStringSet(si.GetFiles());
-
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(output);
-                    si.Dir.DeleteFile(fileName);
-                }
-                else
-                {
-                    output.Dispose();
-                }
-            }
-        }
-    }
-}
\ No newline at end of file
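
Because the format class above must still support writing (see the LUCENE-5377 note), here is a hedged sketch of the write path that AddIndexes needs. The Directory and SegmentInfo are assumed to exist already (for example, after the segment's files have been renamed); the 4.0 writer shown above never touches its FieldInfos argument, so null is passed, and IOContext.DEFAULT is an assumption about the store API.

    using Lucene.Net.Codecs;
    using Lucene.Net.Codecs.Lucene40;
    using Lucene.Net.Index;
    using Lucene.Net.Store;

    internal static class RewriteSegmentInfo
    {
        public static void WriteSi(Directory dir, SegmentInfo si)
        {
    #pragma warning disable 612, 618 // obsolete 4.0 write path, kept for AddIndexes (LUCENE-5377)
            SegmentInfoWriter siWriter = new Lucene40SegmentInfoFormat().SegmentInfoWriter;
    #pragma warning restore 612, 618
            siWriter.Write(dir, si, null, IOContext.DEFAULT);
            // Write() adds "<segment>.si" to si's file set via si.AddFile(fileName)
            // before creating the output, so the caller does not have to.
        }
    }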

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SkipListReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SkipListReader.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SkipListReader.cs
deleted file mode 100644
index cacafe5..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40SkipListReader.cs
+++ /dev/null
@@ -1,176 +0,0 @@
-using Lucene.Net.Support;
-using System;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using IndexInput = Lucene.Net.Store.IndexInput;
-
-    /// <summary>
-    /// Implements the skip list reader for the 4.0 posting list format
-    /// that stores positions and payloads.
-    /// </summary>
-    /// <seealso cref= Lucene40PostingsFormat </seealso>
-    /// @deprecated Only for reading old 4.0 segments
-    [Obsolete("Only for reading old 4.0 segments")]
-    public class Lucene40SkipListReader : MultiLevelSkipListReader
-    {
-        private bool currentFieldStoresPayloads;
-        private bool currentFieldStoresOffsets;
-        private long[] freqPointer;
-        private long[] proxPointer;
-        private int[] payloadLength;
-        private int[] offsetLength;
-
-        private long lastFreqPointer;
-        private long lastProxPointer;
-        private int lastPayloadLength;
-        private int lastOffsetLength;
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40SkipListReader(IndexInput skipStream, int maxSkipLevels, int skipInterval)
-            : base(skipStream, maxSkipLevels, skipInterval)
-        {
-            freqPointer = new long[maxSkipLevels];
-            proxPointer = new long[maxSkipLevels];
-            payloadLength = new int[maxSkipLevels];
-            offsetLength = new int[maxSkipLevels];
-        }
-
-        /// <summary>
-        /// Per-term initialization. </summary>
-        public virtual void Init(long skipPointer, long freqBasePointer, long proxBasePointer, int df, bool storesPayloads, bool storesOffsets)
-        {
-            base.Init(skipPointer, df);
-            this.currentFieldStoresPayloads = storesPayloads;
-            this.currentFieldStoresOffsets = storesOffsets;
-            lastFreqPointer = freqBasePointer;
-            lastProxPointer = proxBasePointer;
-
-            Arrays.Fill(freqPointer, freqBasePointer);
-            Arrays.Fill(proxPointer, proxBasePointer);
-            Arrays.Fill(payloadLength, 0);
-            Arrays.Fill(offsetLength, 0);
-        }
-
-        /// <summary>
-        /// Returns the freq pointer of the doc to which the last call of
-        /// <seealso cref="MultiLevelSkipListReader#skipTo(int)"/> has skipped.
-        /// </summary>
-        public virtual long FreqPointer
-        {
-            get
-            {
-                return lastFreqPointer;
-            }
-        }
-
-        /// <summary>
-        /// Returns the prox pointer of the doc to which the last call of
-        /// <seealso cref="MultiLevelSkipListReader#skipTo(int)"/> has skipped.
-        /// </summary>
-        public virtual long ProxPointer
-        {
-            get
-            {
-                return lastProxPointer;
-            }
-        }
-
-        /// <summary>
-        /// Returns the payload length of the payload stored just before
-        /// the doc to which the last call of <seealso cref="MultiLevelSkipListReader#skipTo(int)"/>
-        /// has skipped.
-        /// </summary>
-        public virtual int PayloadLength
-        {
-            get
-            {
-                return lastPayloadLength;
-            }
-        }
-
-        /// <summary>
-        /// Returns the offset length (endOffset-startOffset) of the position stored just before
-        /// the doc to which the last call of <seealso cref="MultiLevelSkipListReader#skipTo(int)"/>
-        /// has skipped.
-        /// </summary>
-        public virtual int OffsetLength
-        {
-            get
-            {
-                return lastOffsetLength;
-            }
-        }
-
-        protected override void SeekChild(int level)
-        {
-            base.SeekChild(level);
-            freqPointer[level] = lastFreqPointer;
-            proxPointer[level] = lastProxPointer;
-            payloadLength[level] = lastPayloadLength;
-            offsetLength[level] = lastOffsetLength;
-        }
-
-        protected override void SetLastSkipData(int level)
-        {
-            base.SetLastSkipData(level);
-            lastFreqPointer = freqPointer[level];
-            lastProxPointer = proxPointer[level];
-            lastPayloadLength = payloadLength[level];
-            lastOffsetLength = offsetLength[level];
-        }
-
-        protected override int ReadSkipData(int level, IndexInput skipStream)
-        {
-            int delta;
-            if (currentFieldStoresPayloads || currentFieldStoresOffsets)
-            {
-                // the current field stores payloads and/or offsets.
-                // if the doc delta is odd then we have
-                // to read the current payload/offset lengths
-                // because it differs from the lengths of the
-                // previous payload/offset
-                delta = skipStream.ReadVInt32();
-                if ((delta & 1) != 0)
-                {
-                    if (currentFieldStoresPayloads)
-                    {
-                        payloadLength[level] = skipStream.ReadVInt32();
-                    }
-                    if (currentFieldStoresOffsets)
-                    {
-                        offsetLength[level] = skipStream.ReadVInt32();
-                    }
-                }
-                delta = (int)((uint)delta >> 1);
-            }
-            else
-            {
-                delta = skipStream.ReadVInt32();
-            }
-
-            freqPointer[level] += skipStream.ReadVInt32();
-            proxPointer[level] += skipStream.ReadVInt32();
-
-            return delta;
-        }
-    }
-}
\ No newline at end of file
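
The comment in ReadSkipData above describes the low-bit trick used when a field stores payloads and/or offsets. The following sketch shows the matching writer-side encoding implied by that reader logic; it is an illustration only, not the removed skip-list writer itself.

    using Lucene.Net.Store;

    internal static class SkipDeltaEncoding
    {
        // Writer-side mirror of ReadSkipData above (illustration only).
        public static void WriteSkipDelta(IndexOutput skipStream, int docDelta,
            bool storesPayloads, bool storesOffsets, bool lengthChanged,
            int payloadLength, int offsetLength)
        {
            if (storesPayloads || storesOffsets)
            {
                // Shift the delta left and use the low bit to flag "lengths changed";
                // the reader undoes this with (uint)delta >> 1.
                skipStream.WriteVInt32((docDelta << 1) | (lengthChanged ? 1 : 0));
                if (lengthChanged)
                {
                    if (storesPayloads) skipStream.WriteVInt32(payloadLength);
                    if (storesOffsets) skipStream.WriteVInt32(offsetLength);
                }
            }
            else
            {
                skipStream.WriteVInt32(docDelta);
            }
            // The freq and prox file-pointer deltas would follow here, matching the
            // two trailing ReadVInt32() calls in ReadSkipData.
        }
    }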

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs
deleted file mode 100644
index 81fea3e..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsFormat.cs
+++ /dev/null
@@ -1,95 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    // javadocs
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-    /// <summary>
-    /// Lucene 4.0 Stored Fields Format.
-    /// <p>Stored fields are represented by two files:</p>
-    /// <ol>
-    /// <li><a name="field_index" id="field_index"></a>
-    /// <p>The field index, or <tt>.fdx</tt> file.</p>
-    /// <p>this is used to find the location within the field data file of the fields
-    /// of a particular document. Because it contains fixed-length data, this file may
-    /// be easily randomly accessed. The position of document <i>n</i> 's field data is
-    /// the <seealso cref="DataOutput#writeLong Uint64"/> at <i>n*8</i> in this file.</p>
-    /// <p>this contains, for each document, a pointer to its field data, as
-    /// follows:</p>
-    /// <ul>
-    /// <li>FieldIndex (.fdx) --&gt; &lt;Header&gt;, &lt;FieldValuesPosition&gt; <sup>SegSize</sup></li>
-    /// <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    /// <li>FieldValuesPosition --&gt; <seealso cref="DataOutput#writeLong Uint64"/></li>
-    /// </ul>
-    /// </li>
-    /// <li>
-    /// <p><a name="field_data" id="field_data"></a>The field data, or <tt>.fdt</tt> file.</p>
-    /// <p>this contains the stored fields of each document, as follows:</p>
-    /// <ul>
-    /// <li>FieldData (.fdt) --&gt; &lt;Header&gt;, &lt;DocFieldData&gt; <sup>SegSize</sup></li>
-    /// <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    /// <li>DocFieldData --&gt; FieldCount, &lt;FieldNum, Bits, Value&gt;
-    /// <sup>FieldCount</sup></li>
-    /// <li>FieldCount --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    /// <li>FieldNum --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    /// <li>Bits --&gt; <seealso cref="DataOutput#writeByte Byte"/></li>
-    /// <ul>
-    /// <li>low order bit reserved.</li>
-    /// <li>second bit is one for fields containing binary data</li>
-    /// <li>third bit reserved.</li>
-    /// <li>4th to 6th bit (mask: 0x7&lt;&lt;3) define the type of a numeric field:
-    /// <ul>
-    /// <li>all bits in mask are cleared if no numeric field at all</li>
-    /// <li>1&lt;&lt;3: Value is Int</li>
-    /// <li>2&lt;&lt;3: Value is Long</li>
-    /// <li>3&lt;&lt;3: Value is Int as Float (as of <seealso cref="Float#intBitsToFloat(int)"/></li>
-    /// <li>4&lt;&lt;3: Value is Long as Double (as of <seealso cref="Double#longBitsToDouble(long)"/></li>
-    /// </ul>
-    /// </li>
-    /// </ul>
-    /// <li>Value --&gt; String | BinaryValue | Int | Long (depending on Bits)</li>
-    /// <li>BinaryValue --&gt; ValueSize, &lt;<seealso cref="DataOutput#writeByte Byte"/>&gt;^ValueSize</li>
-    /// <li>ValueSize --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    /// </li>
-    /// </ul>
-    /// </ol>
-    /// @lucene.experimental
-    /// </summary>
-    public class Lucene40StoredFieldsFormat : StoredFieldsFormat
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40StoredFieldsFormat()
-        {
-        }
-
-        public override StoredFieldsReader FieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context)
-        {
-            return new Lucene40StoredFieldsReader(directory, si, fn, context);
-        }
-
-        public override StoredFieldsWriter FieldsWriter(Directory directory, SegmentInfo si, IOContext context)
-        {
-            return new Lucene40StoredFieldsWriter(directory, si.Name, context);
-        }
-    }
-}
\ No newline at end of file
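
To make the "Bits" byte layout documented above concrete, here is a small illustrative decoder. The literal masks mirror the description (second bit = binary value, bits 4-6 = numeric type); in the removed sources they live as the FIELD_IS_* constants on Lucene40StoredFieldsWriter.

    internal static class StoredFieldBits
    {
        // Decodes the "Bits" byte from the .fdt layout described above.
        public static string Describe(int bits)
        {
            bool isBinary = (bits & (1 << 1)) != 0;   // second bit: binary value
            int numeric = bits & (0x07 << 3);         // 4th to 6th bits: numeric type
            switch (numeric)
            {
                case 0:      return isBinary ? "binary" : "string";
                case 1 << 3: return "int";
                case 2 << 3: return "long";
                case 3 << 3: return "float (stored as int bits)";
                case 4 << 3: return "double (stored as long bits)";
                default:     return "invalid numeric type";
            }
        }
    }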

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsReader.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
deleted file mode 100644
index f3064fe..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsReader.cs
+++ /dev/null
@@ -1,309 +0,0 @@
-using Lucene.Net.Support;
-using System;
-using System.Diagnostics;
-using System.Reflection;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-    using StoredFieldVisitor = Lucene.Net.Index.StoredFieldVisitor;
-
-    /// <summary>
-    /// Class responsible for access to stored document fields.
-    /// <p/>
-    /// It uses &lt;segment&gt;.fdt and &lt;segment&gt;.fdx; files.
-    /// </summary>
-    /// <seealso cref= Lucene40StoredFieldsFormat
-    /// @lucene.internal </seealso>
-    public sealed class Lucene40StoredFieldsReader : StoredFieldsReader, IDisposable
-    {
-        private readonly FieldInfos fieldInfos;
-        private readonly IndexInput fieldsStream;
-        private readonly IndexInput indexStream;
-        private int numTotalDocs;
-        private int size;
-        private bool closed;
-
-        /// <summary>
-        /// Returns a cloned FieldsReader that shares open
-        ///  IndexInputs with the original one.  It is the caller's
-        ///  job not to close the original FieldsReader until all
-        ///  clones are called (eg, currently SegmentReader manages
-        ///  this logic).
-        /// </summary>
-        public override object Clone()
-        {
-            EnsureOpen();
-            return new Lucene40StoredFieldsReader(fieldInfos, numTotalDocs, size, (IndexInput)fieldsStream.Clone(), (IndexInput)indexStream.Clone());
-        }
-
-        /// <summary>
-        /// Used only by clone. </summary>
-        private Lucene40StoredFieldsReader(FieldInfos fieldInfos, int numTotalDocs, int size, IndexInput fieldsStream, IndexInput indexStream)
-        {
-            this.fieldInfos = fieldInfos;
-            this.numTotalDocs = numTotalDocs;
-            this.size = size;
-            this.fieldsStream = fieldsStream;
-            this.indexStream = indexStream;
-        }
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40StoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context)
-        {
-            string segment = si.Name;
-            bool success = false;
-            fieldInfos = fn;
-            try
-            {
-                fieldsStream = d.OpenInput(IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_EXTENSION), context);
-                string indexStreamFN = IndexFileNames.SegmentFileName(segment, "", Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);
-                indexStream = d.OpenInput(indexStreamFN, context);
-
-                CodecUtil.CheckHeader(indexStream, Lucene40StoredFieldsWriter.CODEC_NAME_IDX, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
-                CodecUtil.CheckHeader(fieldsStream, Lucene40StoredFieldsWriter.CODEC_NAME_DAT, Lucene40StoredFieldsWriter.VERSION_START, Lucene40StoredFieldsWriter.VERSION_CURRENT);
-                Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
-                Debug.Assert(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX == indexStream.GetFilePointer());
-                long indexSize = indexStream.Length - Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX;
-                this.size = (int)(indexSize >> 3);
-                // Verify two sources of "maxDoc" agree:
-                if (this.size != si.DocCount)
-                {
-                    throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + this.size + " but segmentInfo shows " + si.DocCount);
-                }
-                numTotalDocs = (int)(indexSize >> 3);
-                success = true;
-            }
-            finally
-            {
-                // With lock-less commits, it's entirely possible (and
-                // fine) to hit a FileNotFound exception above. In
-                // this case, we want to explicitly close any subset
-                // of things that were opened so that we don't have to
-                // wait for a GC to do so.
-                if (!success)
-                {
-                    try
-                    {
-                        Dispose();
-                    } // ensure we throw our original exception
-                    catch (Exception)
-                    {
-                    }
-                }
-            }
-        }
-
-        /// <exception cref="ObjectDisposedException"> if this FieldsReader is closed </exception>
-        private void EnsureOpen()
-        {
-            if (closed)
-            {
-                throw new ObjectDisposedException(this.GetType().GetTypeInfo().FullName, "this FieldsReader is closed");
-            }
-        }
-
-        /// <summary>
-        /// Closes the underlying <seealso cref="Lucene.Net.Store.IndexInput"/> streams.
-        /// this means that the Fields values will not be accessible.
-        /// </summary>
-        /// <exception cref="IOException"> If an I/O error occurs </exception>
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                if (!closed)
-                {
-                    IOUtils.Close(fieldsStream, indexStream);
-                    closed = true;
-                }
-            }
-        }
-
-        /// <summary>
-        /// Returns number of documents.
-        /// NOTE: This was size() in Lucene.
-        /// </summary>
-        public int Count
-        {
-            get { return size; }
-        }
-
-        private void SeekIndex(int docID)
-        {
-            indexStream.Seek(Lucene40StoredFieldsWriter.HEADER_LENGTH_IDX + docID * 8L);
-        }
-
-        public override void VisitDocument(int n, StoredFieldVisitor visitor)
-        {
-            SeekIndex(n);
-            fieldsStream.Seek(indexStream.ReadInt64());
-
-            int numFields = fieldsStream.ReadVInt32();
-            for (int fieldIDX = 0; fieldIDX < numFields; fieldIDX++)
-            {
-                int fieldNumber = fieldsStream.ReadVInt32();
-                FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
-
-                int bits = fieldsStream.ReadByte() & 0xFF;
-                Debug.Assert(bits <= (Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK | Lucene40StoredFieldsWriter.FIELD_IS_BINARY), "bits=" + bits.ToString("x"));
-
-                switch (visitor.NeedsField(fieldInfo))
-                {
-                    case StoredFieldVisitor.Status.YES:
-                        ReadField(visitor, fieldInfo, bits);
-                        break;
-
-                    case StoredFieldVisitor.Status.NO:
-                        SkipField(bits);
-                        break;
-
-                    case StoredFieldVisitor.Status.STOP:
-                        return;
-                }
-            }
-        }
-
-        private void ReadField(StoredFieldVisitor visitor, FieldInfo info, int bits)
-        {
-            int numeric = bits & Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK;
-            if (numeric != 0)
-            {
-                switch (numeric)
-                {
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_INT:
-                        visitor.Int32Field(info, fieldsStream.ReadInt32());
-                        return;
-
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_LONG:
-                        visitor.Int64Field(info, fieldsStream.ReadInt64());
-                        return;
-
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_FLOAT:
-                        visitor.SingleField(info, Number.Int32BitsToSingle(fieldsStream.ReadInt32()));
-                        return;
-
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_DOUBLE:
-                        visitor.DoubleField(info, BitConverter.Int64BitsToDouble(fieldsStream.ReadInt64()));
-                        return;
-
-                    default:
-                        throw new CorruptIndexException("Invalid numeric type: " + numeric.ToString("x"));
-                }
-            }
-            else
-            {
-                int length = fieldsStream.ReadVInt32();
-                var bytes = new byte[length];
-                fieldsStream.ReadBytes(bytes, 0, length);
-                if ((bits & Lucene40StoredFieldsWriter.FIELD_IS_BINARY) != 0)
-                {
-                    visitor.BinaryField(info, bytes);
-                }
-                else
-                {
-#pragma warning disable 612, 618
-                    visitor.StringField(info, IOUtils.CHARSET_UTF_8.GetString(bytes));
-#pragma warning restore 612, 618
-                }
-            }
-        }
-
-        private void SkipField(int bits)
-        {
-            int numeric = bits & Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_MASK;
-            if (numeric != 0)
-            {
-                switch (numeric)
-                {
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_INT:
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_FLOAT:
-                        fieldsStream.ReadInt32();
-                        return;
-
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_LONG:
-                    case Lucene40StoredFieldsWriter.FIELD_IS_NUMERIC_DOUBLE:
-                        fieldsStream.ReadInt64();
-                        return;
-
-                    default:
-                        throw new CorruptIndexException("Invalid numeric type: " + numeric.ToString("x"));
-                }
-            }
-            else
-            {
-                int length = fieldsStream.ReadVInt32();
-                fieldsStream.Seek(fieldsStream.GetFilePointer() + length);
-            }
-        }
-
-        /// <summary>
-        /// Returns the length in bytes of each raw document in a
-        ///  contiguous range of length numDocs starting with
-        ///  startDocID.  Returns the IndexInput (the fieldStream),
-        ///  already seeked to the starting point for startDocID.
-        /// </summary>
-        public IndexInput RawDocs(int[] lengths, int startDocID, int numDocs)
-        {
-            SeekIndex(startDocID);
-            long startOffset = indexStream.ReadInt64();
-            long lastOffset = startOffset;
-            int count = 0;
-            while (count < numDocs)
-            {
-                long offset;
-                int docID = startDocID + count + 1;
-                Debug.Assert(docID <= numTotalDocs);
-                if (docID < numTotalDocs)
-                {
-                    offset = indexStream.ReadInt64();
-                }
-                else
-                {
-                    offset = fieldsStream.Length;
-                }
-                lengths[count++] = (int)(offset - lastOffset);
-                lastOffset = offset;
-            }
-
-            fieldsStream.Seek(startOffset);
-
-            return fieldsStream;
-        }
-
-        public override long RamBytesUsed()
-        {
-            return 0;
-        }
-
-        public override void CheckIntegrity()
-        {
-        }
-    }
-}
\ No newline at end of file
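
As a closing illustration of the .fdx arithmetic used by SeekIndex and RawDocs in the reader above: the index file holds one Int64 pointer per document after its codec header, so a raw document's byte length is the gap between consecutive pointers, with the .fdt file length closing off the last document. The sketch below assumes the caller supplies the header length, .fdt length and doc count, as the reader does internally.

    using Lucene.Net.Store;

    internal static class StoredFieldsIndexMath
    {
        // Worked version of the SeekIndex/RawDocs arithmetic (illustration only).
        public static long RawDocLength(IndexInput indexStream, long headerLengthIdx,
            long fdtLength, int docID, int numTotalDocs)
        {
            indexStream.Seek(headerLengthIdx + docID * 8L);  // same seek as SeekIndex(docID)
            long start = indexStream.ReadInt64();
            long end = (docID + 1 < numTotalDocs) ? indexStream.ReadInt64() : fdtLength;
            return end - start;
        }
    }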

