lucenenet-commits mailing list archives

From nightowl...@apache.org
Subject [36/62] [abbrv] [partial] lucenenet git commit: Renamed Lucene.Net.Core folder Lucene.Net because the dotnet.exe pack command doesn't allow creating a NuGet package with a different name than its folder. Working around it with the script was much more co
Date Tue, 04 Apr 2017 17:19:42 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
deleted file mode 100644
index b33e43f..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40StoredFieldsWriter.cs
+++ /dev/null
@@ -1,411 +0,0 @@
-using Lucene.Net.Support;
-using System;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using AtomicReader = Lucene.Net.Index.AtomicReader;
-    using IBits = Lucene.Net.Util.IBits;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using Directory = Lucene.Net.Store.Directory;
-    using Document = Documents.Document;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IIndexableField = Lucene.Net.Index.IIndexableField;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using MergeState = Lucene.Net.Index.MergeState;
-    using SegmentReader = Lucene.Net.Index.SegmentReader;
-
-    /// <summary>
-    /// Class responsible for writing stored document fields.
-    /// <p/>
-    /// It uses &lt;segment&gt;.fdt and &lt;segment&gt;.fdx files.
-    /// </summary>
-    /// <seealso cref="Lucene40StoredFieldsFormat"/>
-    /// @lucene.experimental
-    public sealed class Lucene40StoredFieldsWriter : StoredFieldsWriter
-    {
-        // NOTE: bit 0 is free here!  You can steal it!
-        internal static readonly int FIELD_IS_BINARY = 1 << 1;
-
-        // the old bit 1 << 2 was compressed, is now left out
-
-        private const int _NUMERIC_BIT_SHIFT = 3;
-        internal static readonly int FIELD_IS_NUMERIC_MASK = 0x07 << _NUMERIC_BIT_SHIFT;
-
-        internal const int FIELD_IS_NUMERIC_INT = 1 << _NUMERIC_BIT_SHIFT;
-        internal const int FIELD_IS_NUMERIC_LONG = 2 << _NUMERIC_BIT_SHIFT;
-        internal const int FIELD_IS_NUMERIC_FLOAT = 3 << _NUMERIC_BIT_SHIFT;
-        internal const int FIELD_IS_NUMERIC_DOUBLE = 4 << _NUMERIC_BIT_SHIFT;
-
-        // the next possible bits are: 1 << 6; 1 << 7
-        // currently unused: static final int FIELD_IS_NUMERIC_SHORT = 5 << _NUMERIC_BIT_SHIFT;
-        // currently unused: static final int FIELD_IS_NUMERIC_BYTE = 6 << _NUMERIC_BIT_SHIFT;
-
-        internal const string CODEC_NAME_IDX = "Lucene40StoredFieldsIndex";
-        internal const string CODEC_NAME_DAT = "Lucene40StoredFieldsData";
-        internal const int VERSION_START = 0;
-        internal const int VERSION_CURRENT = VERSION_START;
-        internal static readonly long HEADER_LENGTH_IDX = CodecUtil.HeaderLength(CODEC_NAME_IDX);
-        internal static readonly long HEADER_LENGTH_DAT = CodecUtil.HeaderLength(CODEC_NAME_DAT);
-
-        /// <summary>
-        /// Extension of stored fields file </summary>
-        public const string FIELDS_EXTENSION = "fdt";
-
-        /// <summary>
-        /// Extension of stored fields index file </summary>
-        public const string FIELDS_INDEX_EXTENSION = "fdx";
-
-        private readonly Directory directory;
-        private readonly string segment;
-        private IndexOutput fieldsStream;
-        private IndexOutput indexStream;
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40StoredFieldsWriter(Directory directory, string segment, IOContext context)
-        {
-            Debug.Assert(directory != null);
-            this.directory = directory;
-            this.segment = segment;
-
-            bool success = false;
-            try
-            {
-                fieldsStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), context);
-                indexStream = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION), context);
-
-                CodecUtil.WriteHeader(fieldsStream, CODEC_NAME_DAT, VERSION_CURRENT);
-                CodecUtil.WriteHeader(indexStream, CODEC_NAME_IDX, VERSION_CURRENT);
-                Debug.Assert(HEADER_LENGTH_DAT == fieldsStream.GetFilePointer());
-                Debug.Assert(HEADER_LENGTH_IDX == indexStream.GetFilePointer());
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    Abort();
-                }
-            }
-        }
-
-        // Writes the contents of buffer into the fields stream
-        // and adds a new entry for this document into the index
-        // stream.  This assumes the buffer was already written
-        // in the correct fields format.
-        public override void StartDocument(int numStoredFields)
-        {
-            indexStream.WriteInt64(fieldsStream.GetFilePointer());
-            fieldsStream.WriteVInt32(numStoredFields);
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                try
-                {
-                    IOUtils.Close(fieldsStream, indexStream);
-                }
-                finally
-                {
-                    fieldsStream = indexStream = null;
-                }
-            }
-        }
-
-        public override void Abort()
-        {
-            try
-            {
-                Dispose();
-            }
-            catch (Exception)
-            {
-            }
-            IOUtils.DeleteFilesIgnoringExceptions(directory, IndexFileNames.SegmentFileName(segment, "", FIELDS_EXTENSION), IndexFileNames.SegmentFileName(segment, "", FIELDS_INDEX_EXTENSION));
-        }
-
-        public override void WriteField(FieldInfo info, IIndexableField field)
-        {
-            fieldsStream.WriteVInt32(info.Number);
-            int bits = 0;
-            BytesRef bytes;
-            string @string;
-            // TODO: maybe a field should serialize itself?
-            // this way we don't bake into indexer all these
-            // specific encodings for different fields?  and apps
-            // can customize...
-
-            object number = (object)field.GetNumericValue();
-            if (number != null)
-            {
-                if (number is sbyte || number is short || number is int)
-                {
-                    bits |= FIELD_IS_NUMERIC_INT;
-                }
-                else if (number is long)
-                {
-                    bits |= FIELD_IS_NUMERIC_LONG;
-                }
-                else if (number is float)
-                {
-                    bits |= FIELD_IS_NUMERIC_FLOAT;
-                }
-                else if (number is double)
-                {
-                    bits |= FIELD_IS_NUMERIC_DOUBLE;
-                }
-                else
-                {
-                    throw new System.ArgumentException("cannot store numeric type " + number.GetType());
-                }
-                @string = null;
-                bytes = null;
-            }
-            else
-            {
-                bytes = field.GetBinaryValue();
-                if (bytes != null)
-                {
-                    bits |= FIELD_IS_BINARY;
-                    @string = null;
-                }
-                else
-                {
-                    @string = field.GetStringValue();
-                    if (@string == null)
-                    {
-                        throw new System.ArgumentException("field " + field.Name + " is stored but does not have binaryValue, stringValue nor numericValue");
-                    }
-                }
-            }
-
-            fieldsStream.WriteByte((byte)(sbyte)bits);
-
-            if (bytes != null)
-            {
-                fieldsStream.WriteVInt32(bytes.Length);
-                fieldsStream.WriteBytes(bytes.Bytes, bytes.Offset, bytes.Length);
-            }
-            else if (@string != null)
-            {
-                fieldsStream.WriteString(field.GetStringValue());
-            }
-            else
-            {
-                if (number is sbyte || number is short || number is int)
-                {
-                    fieldsStream.WriteInt32((int)number);
-                }
-                else if (number is long)
-                {
-                    fieldsStream.WriteInt64((long)number);
-                }
-                else if (number is float)
-                {
-                    fieldsStream.WriteInt32(Number.SingleToInt32Bits((float)number));
-                }
-                else if (number is double)
-                {
-                    fieldsStream.WriteInt64(BitConverter.DoubleToInt64Bits((double)number));
-                }
-                else
-                {
-                    throw new InvalidOperationException("Cannot get here");
-                }
-            }
-        }
-
-        /// <summary>
-        /// Bulk write a contiguous series of documents.  The
-        ///  lengths array is the length (in bytes) of each raw
-        ///  document.  The stream IndexInput is the
-        ///  fieldsStream from which we should bulk-copy all
-        ///  bytes.
-        /// </summary>
-        public void AddRawDocuments(IndexInput stream, int[] lengths, int numDocs)
-        {
-            long position = fieldsStream.GetFilePointer();
-            long start = position;
-            for (int i = 0; i < numDocs; i++)
-            {
-                indexStream.WriteInt64(position);
-                position += lengths[i];
-            }
-            fieldsStream.CopyBytes(stream, position - start);
-            Debug.Assert(fieldsStream.GetFilePointer() == position);
-        }
-
-        public override void Finish(FieldInfos fis, int numDocs)
-        {
-            if (HEADER_LENGTH_IDX + ((long)numDocs) * 8 != indexStream.GetFilePointer())
-            // This is most likely a bug in Sun JRE 1.6.0_04/_05;
-            // we detect that the bug has struck, here, and
-            // throw an exception to prevent the corruption from
-            // entering the index.  See LUCENE-1282 for
-            // details.
-            {
-                throw new Exception("fdx size mismatch: docCount is " + numDocs + " but fdx file size is " + indexStream.GetFilePointer() + " file=" + indexStream.ToString() + "; now aborting this merge to prevent index corruption");
-            }
-        }
-
-        public override int Merge(MergeState mergeState)
-        {
-            int docCount = 0;
-            // Used for bulk-reading raw bytes for stored fields
-            int[] rawDocLengths = new int[MAX_RAW_MERGE_DOCS];
-            int idx = 0;
-
-            foreach (AtomicReader reader in mergeState.Readers)
-            {
-                SegmentReader matchingSegmentReader = mergeState.MatchingSegmentReaders[idx++];
-                Lucene40StoredFieldsReader matchingFieldsReader = null;
-                if (matchingSegmentReader != null)
-                {
-                    StoredFieldsReader fieldsReader = matchingSegmentReader.FieldsReader;
-                    // we can only bulk-copy if the matching reader is also a Lucene40FieldsReader
-                    if (fieldsReader != null && fieldsReader is Lucene40StoredFieldsReader)
-                    {
-                        matchingFieldsReader = (Lucene40StoredFieldsReader)fieldsReader;
-                    }
-                }
-
-                if (reader.LiveDocs != null)
-                {
-                    docCount += CopyFieldsWithDeletions(mergeState, reader, matchingFieldsReader, rawDocLengths);
-                }
-                else
-                {
-                    docCount += CopyFieldsNoDeletions(mergeState, reader, matchingFieldsReader, rawDocLengths);
-                }
-            }
-            Finish(mergeState.FieldInfos, docCount);
-            return docCount;
-        }
-
-        /// <summary>
-        /// Maximum number of contiguous documents to bulk-copy
-        /// when merging stored fields.
-        /// </summary>
-        private const int MAX_RAW_MERGE_DOCS = 4192;
-
-        private int CopyFieldsWithDeletions(MergeState mergeState, AtomicReader reader, Lucene40StoredFieldsReader matchingFieldsReader, int[] rawDocLengths)
-        {
-            int docCount = 0;
-            int maxDoc = reader.MaxDoc;
-            IBits liveDocs = reader.LiveDocs;
-            Debug.Assert(liveDocs != null);
-            if (matchingFieldsReader != null)
-            {
-                // We can bulk-copy because the fieldInfos are "congruent"
-                for (int j = 0; j < maxDoc; )
-                {
-                    if (!liveDocs.Get(j))
-                    {
-                        // skip deleted docs
-                        ++j;
-                        continue;
-                    }
-                    // We can optimize this case (doing a bulk byte copy) since the field
-                    // numbers are identical
-                    int start = j, numDocs = 0;
-                    do
-                    {
-                        j++;
-                        numDocs++;
-                        if (j >= maxDoc)
-                        {
-                            break;
-                        }
-                        if (!liveDocs.Get(j))
-                        {
-                            j++;
-                            break;
-                        }
-                    } while (numDocs < MAX_RAW_MERGE_DOCS);
-
-                    IndexInput stream = matchingFieldsReader.RawDocs(rawDocLengths, start, numDocs);
-                    AddRawDocuments(stream, rawDocLengths, numDocs);
-                    docCount += numDocs;
-                    mergeState.CheckAbort.Work(300 * numDocs);
-                }
-            }
-            else
-            {
-                for (int j = 0; j < maxDoc; j++)
-                {
-                    if (!liveDocs.Get(j))
-                    {
-                        // skip deleted docs
-                        continue;
-                    }
-                    // TODO: this could be more efficient using
-                    // FieldVisitor instead of loading/writing entire
-                    // doc; ie we just have to renumber the field number
-                    // on the fly?
-                    // NOTE: it's very important to first assign to doc then pass it to
-                    // fieldsWriter.addDocument; see LUCENE-1282
-                    Document doc = reader.Document(j);
-                    AddDocument(doc, mergeState.FieldInfos);
-                    docCount++;
-                    mergeState.CheckAbort.Work(300);
-                }
-            }
-            return docCount;
-        }
-
-        private int CopyFieldsNoDeletions(MergeState mergeState, AtomicReader reader, Lucene40StoredFieldsReader matchingFieldsReader, int[] rawDocLengths)
-        {
-            int maxDoc = reader.MaxDoc;
-            int docCount = 0;
-            if (matchingFieldsReader != null)
-            {
-                // We can bulk-copy because the fieldInfos are "congruent"
-                while (docCount < maxDoc)
-                {
-                    int len = Math.Min(MAX_RAW_MERGE_DOCS, maxDoc - docCount);
-                    IndexInput stream = matchingFieldsReader.RawDocs(rawDocLengths, docCount, len);
-                    AddRawDocuments(stream, rawDocLengths, len);
-                    docCount += len;
-                    mergeState.CheckAbort.Work(300 * len);
-                }
-            }
-            else
-            {
-                for (; docCount < maxDoc; docCount++)
-                {
-                    // NOTE: it's very important to first assign to doc then pass it to
-                    // fieldsWriter.addDocument; see LUCENE-1282
-                    Document doc = reader.Document(docCount);
-                    AddDocument(doc, mergeState.FieldInfos);
-                    mergeState.CheckAbort.Work(300);
-                }
-            }
-            return docCount;
-        }
-    }
-}
\ No newline at end of file
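
The per-field flags byte written by WriteField above packs the field's type into a few bits: bit 1 marks binary fields, and a 3-bit numeric kind sits at bit offset 3. Finish() then checks that the .fdx index file is exactly the codec header plus one 8-byte pointer per document. Below is a minimal standalone sketch of both invariants; the constant values mirror the deleted source, while Describe and ExpectedFdxLength are illustrative helpers, not codec APIs.

using System;

// Minimal standalone sketch of the flags byte written by
// Lucene40StoredFieldsWriter.WriteField above. The constant values mirror
// the deleted source; Describe and ExpectedFdxLength are illustrative
// helpers, not codec APIs.
internal static class StoredFieldsFlagsSketch
{
    private const int FIELD_IS_BINARY = 1 << 1;         // bit 0 is free; bit 2 (compressed) is retired
    private const int NUMERIC_BIT_SHIFT = 3;
    private const int FIELD_IS_NUMERIC_MASK = 0x07 << NUMERIC_BIT_SHIFT;
    private const int FIELD_IS_NUMERIC_INT = 1 << NUMERIC_BIT_SHIFT;
    private const int FIELD_IS_NUMERIC_LONG = 2 << NUMERIC_BIT_SHIFT;
    private const int FIELD_IS_NUMERIC_FLOAT = 3 << NUMERIC_BIT_SHIFT;
    private const int FIELD_IS_NUMERIC_DOUBLE = 4 << NUMERIC_BIT_SHIFT;

    private static string Describe(int bits)
    {
        switch (bits & FIELD_IS_NUMERIC_MASK)           // the numeric kind is a 3-bit enum, not a bit set
        {
            case FIELD_IS_NUMERIC_INT: return "int";
            case FIELD_IS_NUMERIC_LONG: return "long";
            case FIELD_IS_NUMERIC_FLOAT: return "float";
            case FIELD_IS_NUMERIC_DOUBLE: return "double";
        }
        return (bits & FIELD_IS_BINARY) != 0 ? "binary" : "string";
    }

    // Finish() above enforces this invariant: the .fdx index file holds the
    // codec header followed by one 8-byte pointer per document.
    private static long ExpectedFdxLength(long headerLength, int numDocs)
    {
        return headerLength + (long)numDocs * 8;
    }

    private static void Main()
    {
        Console.WriteLine(Describe(FIELD_IS_NUMERIC_LONG)); // long   (bits = 0x10)
        Console.WriteLine(Describe(FIELD_IS_BINARY));       // binary (bits = 0x02)
        Console.WriteLine(Describe(0));                     // string (no bits set)
        Console.WriteLine(ExpectedFdxLength(headerLength: 20, numDocs: 3)); // 44
    }
}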

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsFormat.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsFormat.cs
deleted file mode 100644
index ce91826..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsFormat.cs
+++ /dev/null
@@ -1,128 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    // javadocs
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-
-    /// <summary>
-    /// Lucene 4.0 Term Vectors format.
-    /// <p>Term Vector support is optional on a field-by-field basis. It consists of
-    /// 3 files.</p>
-    /// <ol>
-    /// <li><a name="tvx" id="tvx"></a>
-    /// <p>The Document Index or .tvx file.</p>
-    /// <p>For each document, this stores the offset into the document data (.tvd) and
-    /// field data (.tvf) files.</p>
-    /// <p>DocumentIndex (.tvx) --&gt; Header,&lt;DocumentPosition,FieldPosition&gt;
-    /// <sup>NumDocs</sup></p>
-    /// <ul>
-    ///   <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    ///   <li>DocumentPosition --&gt; <seealso cref="DataOutput#writeLong UInt64"/> (offset in the .tvd file)</li>
-    ///   <li>FieldPosition --&gt; <seealso cref="DataOutput#writeLong UInt64"/> (offset in the .tvf file)</li>
-    /// </ul>
-    /// </li>
-    /// <li><a name="tvd" id="tvd"></a>
-    /// <p>The Document or .tvd file.</p>
-    /// <p>This contains, for each document, the number of fields, a list of the fields
-    /// with term vector info and finally a list of pointers to the field information
-    /// in the .tvf (Term Vector Fields) file.</p>
-    /// <p>The .tvd file is used to map out the fields that have term vectors stored
-    /// and where the field information is in the .tvf file.</p>
-    /// <p>Document (.tvd) --&gt; Header,&lt;NumFields, FieldNums,
-    /// FieldPositions&gt; <sup>NumDocs</sup></p>
-    /// <ul>
-    ///   <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    ///   <li>NumFields --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>FieldNums --&gt; &lt;FieldNumDelta&gt; <sup>NumFields</sup></li>
-    ///   <li>FieldNumDelta --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>FieldPositions --&gt; &lt;FieldPositionDelta&gt; <sup>NumFields-1</sup></li>
-    ///   <li>FieldPositionDelta --&gt; <seealso cref="DataOutput#writeVLong VLong"/></li>
-    /// </ul>
-    /// </li>
-    /// <li><a name="tvf" id="tvf"></a>
-    /// <p>The Field or .tvf file.</p>
-    /// <p>This file contains, for each field that has a term vector stored, a list of
-    /// the terms, their frequencies and, optionally, position, offset, and payload
-    /// information.</p>
-    /// <p>Field (.tvf) --&gt; Header,&lt;NumTerms, Flags, TermFreqs&gt;
-    /// <sup>NumFields</sup></p>
-    /// <ul>
-    ///   <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    ///   <li>NumTerms --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>Flags --&gt; <seealso cref="DataOutput#writeByte Byte"/></li>
-    ///   <li>TermFreqs --&gt; &lt;TermText, TermFreq, Positions?, PayloadData?, Offsets?&gt;
-    ///       <sup>NumTerms</sup></li>
-    ///   <li>TermText --&gt; &lt;PrefixLength, Suffix&gt;</li>
-    ///   <li>PrefixLength --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>Suffix --&gt; <seealso cref="DataOutput#writeString String"/></li>
-    ///   <li>TermFreq --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>Positions --&gt; &lt;PositionDelta PayloadLength?&gt;<sup>TermFreq</sup></li>
-    ///   <li>PositionDelta --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>PayloadLength --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>PayloadData --&gt; <seealso cref="DataOutput#writeByte Byte"/><sup>NumPayloadBytes</sup></li>
-    ///   <li>Offsets --&gt; &lt;<seealso cref="DataOutput#writeVInt VInt"/>, <seealso cref="DataOutput#writeVInt VInt"/>&gt;<sup>TermFreq</sup></li>
-    /// </ul>
-    /// <p>Notes:</p>
-    /// <ul>
-    /// <li>Flags byte stores whether this term vector has position, offset, and payload
-    /// information stored.</li>
-    /// <li>Term byte prefixes are shared. The PrefixLength is the number of initial
-    /// bytes from the previous term which must be pre-pended to a term's suffix
-    /// in order to form the term's bytes. Thus, if the previous term's text was "bone"
-    /// and the term is "boy", the PrefixLength is two and the suffix is "y".</li>
-    /// <li>PositionDelta is, if payloads are disabled for the term's field, the
-    /// difference between the position of the current occurrence in the document and
-    /// the previous occurrence (or zero, if this is the first occurrence in this
-    /// document). If payloads are enabled for the term's field, then PositionDelta/2
-    /// is the difference between the current and the previous position. If payloads
-    /// are enabled and PositionDelta is odd, then PayloadLength is stored, indicating
-    /// the length of the payload at the current term position.</li>
-    /// <li>PayloadData is metadata associated with a term position. If
-    /// PayloadLength is stored at the current position, then it indicates the length
-    /// of this payload. If PayloadLength is not stored, then this payload has the same
-    /// length as the payload at the previous position. PayloadData encodes the
-    /// concatenated bytes for all of a term's occurrences.</li>
-    /// <li>Offsets are stored as delta encoded VInts. The first VInt is the
-    /// startOffset, the second is the endOffset.</li>
-    /// </ul>
-    /// </li>
-    /// </ol>
-    /// </summary>
-    public class Lucene40TermVectorsFormat : TermVectorsFormat
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40TermVectorsFormat()
-        {
-        }
-
-        public override TermVectorsReader VectorsReader(Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
-        {
-            return new Lucene40TermVectorsReader(directory, segmentInfo, fieldInfos, context);
-        }
-
-        public override TermVectorsWriter VectorsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context)
-        {
-            return new Lucene40TermVectorsWriter(directory, segmentInfo.Name, context);
-        }
-    }
-}
\ No newline at end of file
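
The &lt;PrefixLength, Suffix&gt; term encoding described above is plain front coding: each term stores only how many leading bytes it shares with the previous term, plus the remaining suffix. The sketch below illustrates the idea over strings rather than the UTF-8 byte arrays the codec actually uses; the type and method names are illustrative.

using System;

// Sketch of the <PrefixLength, Suffix> front coding described in the
// Lucene40TermVectorsFormat docs above. The real codec works on UTF-8
// bytes via BytesRef; this demo uses strings for brevity.
internal static class TermPrefixCodingSketch
{
    private static (int PrefixLength, string Suffix) Encode(string previous, string term)
    {
        int p = 0;
        int max = Math.Min(previous.Length, term.Length);
        while (p < max && previous[p] == term[p])
        {
            p++; // count the leading characters shared with the previous term
        }
        return (p, term.Substring(p));
    }

    private static string Decode(string previous, int prefixLength, string suffix)
    {
        // Rebuild the term: prefixLength chars of the previous term + suffix.
        return previous.Substring(0, prefixLength) + suffix;
    }

    private static void Main()
    {
        // The example from the format notes: previous term "bone", term "boy".
        var (len, suffix) = Encode("bone", "boy");
        Console.WriteLine($"PrefixLength={len}, Suffix='{suffix}'"); // PrefixLength=2, Suffix='y'
        Console.WriteLine(Decode("bone", len, suffix));              // boy
    }
}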

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs
deleted file mode 100644
index a0de655..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsReader.cs
+++ /dev/null
@@ -1,909 +0,0 @@
-using Lucene.Net.Index;
-using Lucene.Net.Support;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using IBits = Lucene.Net.Util.IBits;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using Directory = Lucene.Net.Store.Directory;
-    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
-    using DocsEnum = Lucene.Net.Index.DocsEnum;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using Fields = Lucene.Net.Index.Fields;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using SegmentInfo = Lucene.Net.Index.SegmentInfo;
-    using Terms = Lucene.Net.Index.Terms;
-    using TermsEnum = Lucene.Net.Index.TermsEnum;
-
-    /// <summary>
-    /// Lucene 4.0 Term Vectors reader.
-    /// <p>
-    /// It reads .tvd, .tvf, and .tvx files.
-    /// </summary>
-    /// <seealso cref="Lucene40TermVectorsFormat"/>
-    public class Lucene40TermVectorsReader : TermVectorsReader, IDisposable
-    {
-        internal const sbyte STORE_POSITIONS_WITH_TERMVECTOR = 0x1;
-
-        internal const sbyte STORE_OFFSET_WITH_TERMVECTOR = 0x2;
-
-        internal const sbyte STORE_PAYLOAD_WITH_TERMVECTOR = 0x4;
-
-        /// <summary>
-        /// Extension of vectors fields file </summary>
-        internal const string VECTORS_FIELDS_EXTENSION = "tvf";
-
-        /// <summary>
-        /// Extension of vectors documents file </summary>
-        internal const string VECTORS_DOCUMENTS_EXTENSION = "tvd";
-
-        /// <summary>
-        /// Extension of vectors index file </summary>
-        internal const string VECTORS_INDEX_EXTENSION = "tvx";
-
-        internal const string CODEC_NAME_FIELDS = "Lucene40TermVectorsFields";
-        internal const string CODEC_NAME_DOCS = "Lucene40TermVectorsDocs";
-        internal const string CODEC_NAME_INDEX = "Lucene40TermVectorsIndex";
-
-        internal const int VERSION_NO_PAYLOADS = 0;
-        internal const int VERSION_PAYLOADS = 1;
-        internal const int VERSION_START = VERSION_NO_PAYLOADS;
-        internal const int VERSION_CURRENT = VERSION_PAYLOADS;
-
-        internal static readonly long HEADER_LENGTH_FIELDS = CodecUtil.HeaderLength(CODEC_NAME_FIELDS);
-        internal static readonly long HEADER_LENGTH_DOCS = CodecUtil.HeaderLength(CODEC_NAME_DOCS);
-        internal static readonly long HEADER_LENGTH_INDEX = CodecUtil.HeaderLength(CODEC_NAME_INDEX);
-
-        private FieldInfos fieldInfos;
-
-        private IndexInput tvx;
-        private IndexInput tvd;
-        private IndexInput tvf;
-        private int size;
-        private int numTotalDocs;
-
-        /// <summary>
-        /// Used by clone. </summary>
-        internal Lucene40TermVectorsReader(FieldInfos fieldInfos, IndexInput tvx, IndexInput tvd, IndexInput tvf, int size, int numTotalDocs)
-        {
-            this.fieldInfos = fieldInfos;
-            this.tvx = tvx;
-            this.tvd = tvd;
-            this.tvf = tvf;
-            this.size = size;
-            this.numTotalDocs = numTotalDocs;
-        }
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40TermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldInfos, IOContext context)
-        {
-            string segment = si.Name;
-            int size = si.DocCount;
-
-            bool success = false;
-
-            try
-            {
-                string idxName = IndexFileNames.SegmentFileName(segment, "", VECTORS_INDEX_EXTENSION);
-                tvx = d.OpenInput(idxName, context);
-                int tvxVersion = CodecUtil.CheckHeader(tvx, CODEC_NAME_INDEX, VERSION_START, VERSION_CURRENT);
-
-                string fn = IndexFileNames.SegmentFileName(segment, "", VECTORS_DOCUMENTS_EXTENSION);
-                tvd = d.OpenInput(fn, context);
-                int tvdVersion = CodecUtil.CheckHeader(tvd, CODEC_NAME_DOCS, VERSION_START, VERSION_CURRENT);
-                fn = IndexFileNames.SegmentFileName(segment, "", VECTORS_FIELDS_EXTENSION);
-                tvf = d.OpenInput(fn, context);
-                int tvfVersion = CodecUtil.CheckHeader(tvf, CODEC_NAME_FIELDS, VERSION_START, VERSION_CURRENT);
-                Debug.Assert(HEADER_LENGTH_INDEX == tvx.GetFilePointer());
-                Debug.Assert(HEADER_LENGTH_DOCS == tvd.GetFilePointer());
-                Debug.Assert(HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
-                Debug.Assert(tvxVersion == tvdVersion);
-                Debug.Assert(tvxVersion == tvfVersion);
-
-                numTotalDocs = (int)((tvx.Length - HEADER_LENGTH_INDEX) >> 4);
-
-                this.size = numTotalDocs;
-                Debug.Assert(size == 0 || numTotalDocs == size);
-
-                this.fieldInfos = fieldInfos;
-                success = true;
-            }
-            finally
-            {
-                // With lock-less commits, it's entirely possible (and
-                // fine) to hit a FileNotFound exception above. In
-                // this case, we want to explicitly close any subset
-                // of things that were opened so that we don't have to
-                // wait for a GC to do so.
-                if (!success)
-                {
-                    try
-                    {
-                        Dispose();
-                    } // ensure we throw our original exception
-#pragma warning disable 168
-                    catch (Exception t)
-#pragma warning restore 168
-                    {
-                    }
-                }
-            }
-        }
-
-        // Used for bulk copy when merging
-        internal virtual IndexInput TvdStream
-        {
-            get
-            {
-                return tvd;
-            }
-        }
-
-        // Used for bulk copy when merging
-        internal virtual IndexInput TvfStream
-        {
-            get
-            {
-                return tvf;
-            }
-        }
-
-        // Not private to avoid synthetic access$NNN methods
-        internal virtual void SeekTvx(int docNum)
-        {
-            tvx.Seek(docNum * 16L + HEADER_LENGTH_INDEX);
-        }
-
-        /// <summary>
-        /// Retrieve the length (in bytes) of the tvd and tvf
-        ///  entries for the next numDocs starting with
-        ///  startDocID.  This is used for bulk copying when
-        ///  merging segments, if the field numbers are
-        ///  congruent.  Once this returns, the tvf and tvd streams
-        ///  are positioned at the startDocID.
-        /// </summary>
-        internal void RawDocs(int[] tvdLengths, int[] tvfLengths, int startDocID, int numDocs)
-        {
-            if (tvx == null)
-            {
-                Arrays.Fill(tvdLengths, 0);
-                Arrays.Fill(tvfLengths, 0);
-                return;
-            }
-
-            SeekTvx(startDocID);
-
-            long tvdPosition = tvx.ReadInt64();
-            tvd.Seek(tvdPosition);
-
-            long tvfPosition = tvx.ReadInt64();
-            tvf.Seek(tvfPosition);
-
-            long lastTvdPosition = tvdPosition;
-            long lastTvfPosition = tvfPosition;
-
-            int count = 0;
-            while (count < numDocs)
-            {
-                int docID = startDocID + count + 1;
-                Debug.Assert(docID <= numTotalDocs);
-                if (docID < numTotalDocs)
-                {
-                    tvdPosition = tvx.ReadInt64();
-                    tvfPosition = tvx.ReadInt64();
-                }
-                else
-                {
-                    tvdPosition = tvd.Length;
-                    tvfPosition = tvf.Length;
-                    Debug.Assert(count == numDocs - 1);
-                }
-                tvdLengths[count] = (int)(tvdPosition - lastTvdPosition);
-                tvfLengths[count] = (int)(tvfPosition - lastTvfPosition);
-                count++;
-                lastTvdPosition = tvdPosition;
-                lastTvfPosition = tvfPosition;
-            }
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-                IOUtils.Close(tvx, tvd, tvf);
-        }
-
-        /// <summary>
-        /// The number of documents in the reader.
-        /// NOTE: This was size() in Lucene.
-        /// </summary>
-        internal virtual int Count
-        {
-            get { return size; }
-        }
-
-        private class TVFields : Fields
-        {
-            private readonly Lucene40TermVectorsReader outerInstance;
-
-            private readonly int[] fieldNumbers;
-            private readonly long[] fieldFPs;
-            private readonly IDictionary<int, int> fieldNumberToIndex = new Dictionary<int, int>();
-
-            public TVFields(Lucene40TermVectorsReader outerInstance, int docID)
-            {
-                this.outerInstance = outerInstance;
-                outerInstance.SeekTvx(docID);
-                outerInstance.tvd.Seek(outerInstance.tvx.ReadInt64());
-
-                int fieldCount = outerInstance.tvd.ReadVInt32();
-                Debug.Assert(fieldCount >= 0);
-                if (fieldCount != 0)
-                {
-                    fieldNumbers = new int[fieldCount];
-                    fieldFPs = new long[fieldCount];
-                    for (int fieldUpto = 0; fieldUpto < fieldCount; fieldUpto++)
-                    {
-                        int fieldNumber = outerInstance.tvd.ReadVInt32();
-                        fieldNumbers[fieldUpto] = fieldNumber;
-                        fieldNumberToIndex[fieldNumber] = fieldUpto;
-                    }
-
-                    long position = outerInstance.tvx.ReadInt64();
-                    fieldFPs[0] = position;
-                    for (int fieldUpto = 1; fieldUpto < fieldCount; fieldUpto++)
-                    {
-                        position += outerInstance.tvd.ReadVInt64();
-                        fieldFPs[fieldUpto] = position;
-                    }
-                }
-                else
-                {
-                    // TODO: we can improve writer here, eg write 0 into
-                    // tvx file, so we know on first read from tvx that
-                    // this doc has no TVs
-                    fieldNumbers = null;
-                    fieldFPs = null;
-                }
-            }
-
-            public override IEnumerator<string> GetEnumerator()
-            {
-                return GetFieldInfoEnumerable().GetEnumerator();
-            }
-
-            private IEnumerable<string> GetFieldInfoEnumerable()
-            {
-                int fieldUpto = 0;
-                while (fieldNumbers != null && fieldUpto < fieldNumbers.Length)
-                {
-                    yield return outerInstance.fieldInfos.FieldInfo(fieldNumbers[fieldUpto++]).Name;
-                }
-            }
-
-            public override Terms GetTerms(string field)
-            {
-                FieldInfo fieldInfo = outerInstance.fieldInfos.FieldInfo(field);
-                if (fieldInfo == null)
-                {
-                    // No such field
-                    return null;
-                }
-
-                int fieldIndex;
-                if (!fieldNumberToIndex.TryGetValue(fieldInfo.Number, out fieldIndex))
-                {
-                    // Term vectors were not indexed for this field
-                    return null;
-                }
-
-                return new TVTerms(outerInstance, fieldFPs[fieldIndex]);
-            }
-
-            public override int Count
-            {
-                get
-                {
-                    if (fieldNumbers == null)
-                    {
-                        return 0;
-                    }
-                    else
-                    {
-                        return fieldNumbers.Length;
-                    }
-                }
-            }
-        }
-
-        private class TVTerms : Terms
-        {
-            private readonly Lucene40TermVectorsReader outerInstance;
-
-            private readonly int numTerms;
-            private readonly long tvfFPStart;
-            private readonly bool storePositions;
-            private readonly bool storeOffsets;
-            private readonly bool storePayloads;
-
-            public TVTerms(Lucene40TermVectorsReader outerInstance, long tvfFP)
-            {
-                this.outerInstance = outerInstance;
-                outerInstance.tvf.Seek(tvfFP);
-                numTerms = outerInstance.tvf.ReadVInt32();
-                byte bits = outerInstance.tvf.ReadByte();
-                storePositions = (bits & STORE_POSITIONS_WITH_TERMVECTOR) != 0;
-                storeOffsets = (bits & STORE_OFFSET_WITH_TERMVECTOR) != 0;
-                storePayloads = (bits & STORE_PAYLOAD_WITH_TERMVECTOR) != 0;
-                tvfFPStart = outerInstance.tvf.GetFilePointer();
-            }
-
-            public override TermsEnum GetIterator(TermsEnum reuse)
-            {
-                TVTermsEnum termsEnum;
-                if (reuse is TVTermsEnum)
-                {
-                    termsEnum = (TVTermsEnum)reuse;
-                    if (!termsEnum.CanReuse(outerInstance.tvf))
-                    {
-                        termsEnum = new TVTermsEnum(outerInstance);
-                    }
-                }
-                else
-                {
-                    termsEnum = new TVTermsEnum(outerInstance);
-                }
-                termsEnum.Reset(numTerms, tvfFPStart, storePositions, storeOffsets, storePayloads);
-                return termsEnum;
-            }
-
-            public override long Count
-            {
-                get { return numTerms; }
-            }
-
-            public override long SumTotalTermFreq
-            {
-                get
-                {
-                    return -1;
-                }
-            }
-
-            public override long SumDocFreq
-            {
-                get
-                {
-                    // Every term occurs in just one doc:
-                    return numTerms;
-                }
-            }
-
-            public override int DocCount
-            {
-                get
-                {
-                    return 1;
-                }
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    // TODO: really indexer hardwires
-                    // this...?  I guess codec could buffer and re-sort...
-                    return BytesRef.UTF8SortedAsUnicodeComparer;
-                }
-            }
-
-            public override bool HasFreqs
-            {
-                get { return true; }
-            }
-
-            public override bool HasOffsets
-            {
-                get { return storeOffsets; }
-            }
-
-            public override bool HasPositions
-            {
-                get { return storePositions; }
-            }
-
-            public override bool HasPayloads
-            {
-                get { return storePayloads; }
-            }
-        }
-
-        private class TVTermsEnum : TermsEnum
-        {
-            private readonly Lucene40TermVectorsReader outerInstance;
-
-            private readonly IndexInput origTVF;
-            private readonly IndexInput tvf;
-            private int numTerms;
-            private int nextTerm;
-            private int freq;
-            private readonly BytesRef lastTerm = new BytesRef();
-            private readonly BytesRef term = new BytesRef();
-            private bool storePositions;
-            private bool storeOffsets;
-            private bool storePayloads;
-            private long tvfFP;
-
-            private int[] positions;
-            private int[] startOffsets;
-            private int[] endOffsets;
-
-            // one shared byte[] for any term's payloads
-            private int[] payloadOffsets;
-
-            private int lastPayloadLength;
-            private byte[] payloadData;
-
-            // NOTE: tvf is pre-positioned by caller
-            public TVTermsEnum(Lucene40TermVectorsReader outerInstance)
-            {
-                this.outerInstance = outerInstance;
-                this.origTVF = outerInstance.tvf;
-                tvf = (IndexInput)origTVF.Clone();
-            }
-
-            public virtual bool CanReuse(IndexInput tvf)
-            {
-                return tvf == origTVF;
-            }
-
-            public virtual void Reset(int numTerms, long tvfFPStart, bool storePositions, bool storeOffsets, bool storePayloads)
-            {
-                this.numTerms = numTerms;
-                this.storePositions = storePositions;
-                this.storeOffsets = storeOffsets;
-                this.storePayloads = storePayloads;
-                nextTerm = 0;
-                tvf.Seek(tvfFPStart);
-                tvfFP = tvfFPStart;
-                positions = null;
-                startOffsets = null;
-                endOffsets = null;
-                payloadOffsets = null;
-                payloadData = null;
-                lastPayloadLength = -1;
-            }
-
-            // NOTE: slow!  (linear scan)
-            public override SeekStatus SeekCeil(BytesRef text)
-            {
-                if (nextTerm != 0)
-                {
-                    int cmp = text.CompareTo(term);
-                    if (cmp < 0)
-                    {
-                        nextTerm = 0;
-                        tvf.Seek(tvfFP);
-                    }
-                    else if (cmp == 0)
-                    {
-                        return SeekStatus.FOUND;
-                    }
-                }
-
-                while (Next() != null)
-                {
-                    int cmp = text.CompareTo(term);
-                    if (cmp < 0)
-                    {
-                        return SeekStatus.NOT_FOUND;
-                    }
-                    else if (cmp == 0)
-                    {
-                        return SeekStatus.FOUND;
-                    }
-                }
-
-                return SeekStatus.END;
-            }
-
-            public override void SeekExact(long ord)
-            {
-                throw new System.NotSupportedException();
-            }
-
-            public override BytesRef Next()
-            {
-                if (nextTerm >= numTerms)
-                {
-                    return null;
-                }
-                term.CopyBytes(lastTerm);
-                int start = tvf.ReadVInt32();
-                int deltaLen = tvf.ReadVInt32();
-                term.Length = start + deltaLen;
-                term.Grow(term.Length);
-                tvf.ReadBytes(term.Bytes, start, deltaLen);
-                freq = tvf.ReadVInt32();
-
-                if (storePayloads)
-                {
-                    positions = new int[freq];
-                    payloadOffsets = new int[freq];
-                    int totalPayloadLength = 0;
-                    int pos = 0;
-                    for (int posUpto = 0; posUpto < freq; posUpto++)
-                    {
-                        int code = tvf.ReadVInt32();
-                        pos += (int)((uint)code >> 1);
-                        positions[posUpto] = pos;
-                        if ((code & 1) != 0)
-                        {
-                            // length change
-                            lastPayloadLength = tvf.ReadVInt32();
-                        }
-                        payloadOffsets[posUpto] = totalPayloadLength;
-                        totalPayloadLength += lastPayloadLength;
-                        Debug.Assert(totalPayloadLength >= 0);
-                    }
-                    payloadData = new byte[totalPayloadLength];
-                    tvf.ReadBytes(payloadData, 0, payloadData.Length);
-                }
-                else if (storePositions) // no payloads
-                {
-                    // TODO: we could maybe reuse last array, if we can
-                    // somehow be careful about consumer never using two
-                    // D&PEnums at once...
-                    positions = new int[freq];
-                    int pos = 0;
-                    for (int posUpto = 0; posUpto < freq; posUpto++)
-                    {
-                        pos += tvf.ReadVInt32();
-                        positions[posUpto] = pos;
-                    }
-                }
-
-                if (storeOffsets)
-                {
-                    startOffsets = new int[freq];
-                    endOffsets = new int[freq];
-                    int offset = 0;
-                    for (int posUpto = 0; posUpto < freq; posUpto++)
-                    {
-                        startOffsets[posUpto] = offset + tvf.ReadVInt32();
-                        offset = endOffsets[posUpto] = startOffsets[posUpto] + tvf.ReadVInt32();
-                    }
-                }
-
-                lastTerm.CopyBytes(term);
-                nextTerm++;
-                return term;
-            }
-
-            public override BytesRef Term
-            {
-                get { return term; }
-            }
-
-            public override long Ord
-            {
-                get { throw new System.NotSupportedException(); }
-            }
-
-            public override int DocFreq
-            {
-                get { return 1; }
-            }
-
-            public override long TotalTermFreq
-            {
-                get { return freq; }
-            }
-
-            public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags) // ignored
-            {
-                TVDocsEnum docsEnum;
-                if (reuse != null && reuse is TVDocsEnum)
-                {
-                    docsEnum = (TVDocsEnum)reuse;
-                }
-                else
-                {
-                    docsEnum = new TVDocsEnum();
-                }
-                docsEnum.Reset(liveDocs, freq);
-                return docsEnum;
-            }
-
-            public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags)
-            {
-                if (!storePositions && !storeOffsets)
-                {
-                    return null;
-                }
-
-                TVDocsAndPositionsEnum docsAndPositionsEnum;
-                if (reuse != null && reuse is TVDocsAndPositionsEnum)
-                {
-                    docsAndPositionsEnum = (TVDocsAndPositionsEnum)reuse;
-                }
-                else
-                {
-                    docsAndPositionsEnum = new TVDocsAndPositionsEnum();
-                }
-                docsAndPositionsEnum.Reset(liveDocs, positions, startOffsets, endOffsets, payloadOffsets, payloadData);
-                return docsAndPositionsEnum;
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return BytesRef.UTF8SortedAsUnicodeComparer;
-                }
-            }
-        }
-
-        // NOTE: sort of a silly class, since you can get the
-        // freq() already by TermsEnum.totalTermFreq
-        private class TVDocsEnum : DocsEnum
-        {
-            private bool didNext;
-            private int doc = -1;
-            private int freq;
-            private IBits liveDocs;
-
-            public override int Freq
-            {
-                get { return freq; }
-            }
-
-            public override int DocID
-            {
-                get { return doc; }
-            }
-
-            public override int NextDoc()
-            {
-                if (!didNext && (liveDocs == null || liveDocs.Get(0)))
-                {
-                    didNext = true;
-                    return (doc = 0);
-                }
-                else
-                {
-                    return (doc = NO_MORE_DOCS);
-                }
-            }
-
-            public override int Advance(int target)
-            {
-                return SlowAdvance(target);
-            }
-
-            public virtual void Reset(IBits liveDocs, int freq)
-            {
-                this.liveDocs = liveDocs;
-                this.freq = freq;
-                this.doc = -1;
-                didNext = false;
-            }
-
-            public override long GetCost()
-            {
-                return 1;
-            }
-        }
-
-        private sealed class TVDocsAndPositionsEnum : DocsAndPositionsEnum
-        {
-            private bool didNext;
-            private int doc = -1;
-            private int nextPos;
-            private IBits liveDocs;
-            private int[] positions;
-            private int[] startOffsets;
-            private int[] endOffsets;
-            private int[] payloadOffsets;
-            private readonly BytesRef payload = new BytesRef();
-            private byte[] payloadBytes;
-
-            public override int Freq
-            {
-                get
-                {
-                    if (positions != null)
-                    {
-                        return positions.Length;
-                    }
-                    else
-                    {
-                        Debug.Assert(startOffsets != null);
-                        return startOffsets.Length;
-                    }
-                }
-            }
-
-            public override int DocID
-            {
-                get { return doc; }
-            }
-
-            public override int NextDoc()
-            {
-                if (!didNext && (liveDocs == null || liveDocs.Get(0)))
-                {
-                    didNext = true;
-                    return (doc = 0);
-                }
-                else
-                {
-                    return (doc = NO_MORE_DOCS);
-                }
-            }
-
-            public override int Advance(int target)
-            {
-                return SlowAdvance(target);
-            }
-
-            public void Reset(IBits liveDocs, int[] positions, int[] startOffsets, int[] endOffsets, int[] payloadLengths, byte[] payloadBytes)
-            {
-                this.liveDocs = liveDocs;
-                this.positions = positions;
-                this.startOffsets = startOffsets;
-                this.endOffsets = endOffsets;
-                this.payloadOffsets = payloadLengths;
-                this.payloadBytes = payloadBytes;
-                this.doc = -1;
-                didNext = false;
-                nextPos = 0;
-            }
-
-            public override BytesRef GetPayload()
-            {
-                if (payloadOffsets == null)
-                {
-                    return null;
-                }
-                else
-                {
-                    int off = payloadOffsets[nextPos - 1];
-                    int end = nextPos == payloadOffsets.Length ? payloadBytes.Length : payloadOffsets[nextPos];
-                    if (end - off == 0)
-                    {
-                        return null;
-                    }
-                    payload.Bytes = payloadBytes;
-                    payload.Offset = off;
-                    payload.Length = end - off;
-                    return payload;
-                }
-            }
-
-            public override int NextPosition()
-            {
-                Debug.Assert((positions != null && nextPos < positions.Length) || startOffsets != null && nextPos < startOffsets.Length);
-
-                if (positions != null)
-                {
-                    return positions[nextPos++];
-                }
-                else
-                {
-                    nextPos++;
-                    return -1;
-                }
-            }
-
-            public override int StartOffset
-            {
-                get
-                {
-                    if (startOffsets == null)
-                    {
-                        return -1;
-                    }
-                    else
-                    {
-                        return startOffsets[nextPos - 1];
-                    }
-                }
-            }
-
-            public override int EndOffset
-            {
-                get
-                {
-                    if (endOffsets == null)
-                    {
-                        return -1;
-                    }
-                    else
-                    {
-                        return endOffsets[nextPos - 1];
-                    }
-                }
-            }
-
-            public override long GetCost()
-            {
-                return 1;
-            }
-        }
-
-        public override Fields Get(int docID)
-        {
-            if (tvx != null)
-            {
-                Fields fields = new TVFields(this, docID);
-                if (fields.Count == 0)
-                {
-                    // TODO: we can improve writer here, eg write 0 into
-                    // tvx file, so we know on first read from tvx that
-                    // this doc has no TVs
-                    return null;
-                }
-                else
-                {
-                    return fields;
-                }
-            }
-            else
-            {
-                return null;
-            }
-        }
-
-        public override object Clone()
-        {
-            IndexInput cloneTvx = null;
-            IndexInput cloneTvd = null;
-            IndexInput cloneTvf = null;
-
-            // These are null when a TermVectorsReader was created
-            // on a segment that did not have term vectors saved
-            if (tvx != null && tvd != null && tvf != null)
-            {
-                cloneTvx = (IndexInput)tvx.Clone();
-                cloneTvd = (IndexInput)tvd.Clone();
-                cloneTvf = (IndexInput)tvf.Clone();
-            }
-
-            return new Lucene40TermVectorsReader(fieldInfos, cloneTvx, cloneTvd, cloneTvf, size, numTotalDocs);
-        }
-
-        public override long RamBytesUsed()
-        {
-            return 0;
-        }
-
-        public override void CheckIntegrity()
-        {
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsWriter.cs b/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
deleted file mode 100644
index 41bc759..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene40/Lucene40TermVectorsWriter.cs
+++ /dev/null
@@ -1,527 +0,0 @@
-using Lucene.Net.Support;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-
-namespace Lucene.Net.Codecs.Lucene40
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
-    using AtomicReader = Lucene.Net.Index.AtomicReader;
-    using IBits = Lucene.Net.Util.IBits;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using DataInput = Lucene.Net.Store.DataInput;
-    using Directory = Lucene.Net.Store.Directory;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using Fields = Lucene.Net.Index.Fields;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using MergeState = Lucene.Net.Index.MergeState;
-    using SegmentReader = Lucene.Net.Index.SegmentReader;
-    using StringHelper = Lucene.Net.Util.StringHelper;
-
-    // TODO: make a new 4.0 TV format that encodes better
-    //   - use startOffset (not endOffset) as base for delta on
-    //     next startOffset because today for syns or ngrams or
-    //     WDF or shingles etc. we are encoding negative vints
-    //     (= slow, 5 bytes per)
-    //   - if doc has no term vectors, write 0 into the tvx
-    //     file; saves a seek to tvd only to read a 0 vint (and
-    //     saves a byte in tvd)
-
-    /// <summary>
-    /// Lucene 4.0 Term Vectors writer.
-    /// <p>
-    /// It writes .tvd, .tvf, and .tvx files.
-    /// </summary>
-    /// <seealso cref="Lucene40TermVectorsFormat"/>
-    public sealed class Lucene40TermVectorsWriter : TermVectorsWriter
-    {
-        private readonly Directory directory;
-        private readonly string segment;
-        private IndexOutput tvx = null, tvd = null, tvf = null;
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene40TermVectorsWriter(Directory directory, string segment, IOContext context)
-        {
-            this.directory = directory;
-            this.segment = segment;
-            bool success = false;
-            try
-            {
-                // Open files for TermVector storage
-                tvx = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION), context);
-                CodecUtil.WriteHeader(tvx, Lucene40TermVectorsReader.CODEC_NAME_INDEX, Lucene40TermVectorsReader.VERSION_CURRENT);
-                tvd = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
-                CodecUtil.WriteHeader(tvd, Lucene40TermVectorsReader.CODEC_NAME_DOCS, Lucene40TermVectorsReader.VERSION_CURRENT);
-                tvf = directory.CreateOutput(IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
-                CodecUtil.WriteHeader(tvf, Lucene40TermVectorsReader.CODEC_NAME_FIELDS, Lucene40TermVectorsReader.VERSION_CURRENT);
-                Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_INDEX == tvx.GetFilePointer());
-                Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_DOCS == tvd.GetFilePointer());
-                Debug.Assert(Lucene40TermVectorsReader.HEADER_LENGTH_FIELDS == tvf.GetFilePointer());
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    Abort();
-                }
-            }
-        }
-
-        public override void StartDocument(int numVectorFields)
-        {
-            lastFieldName = null;
-            this.numVectorFields = numVectorFields;
-            tvx.WriteInt64(tvd.GetFilePointer());
-            tvx.WriteInt64(tvf.GetFilePointer());
-            tvd.WriteVInt32(numVectorFields);
-            fieldCount = 0;
-            fps = ArrayUtil.Grow(fps, numVectorFields);
-        }
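
Each call above appends exactly two 8-byte pointers to .tvx (the current .tvd and .tvf positions), so a document's entry can be located with constant arithmetic, and Finish() below can verify the file length as header + numDocs * 16. A hedged sketch of that seek arithmetic (headerLength stands in for Lucene40TermVectorsReader.HEADER_LENGTH_INDEX):

    static class TvxLayoutDemo
    {
        const int BytesPerDoc = 16;  // two 8-byte pointers per doc: tvd, then tvf

        // Where doc N's pointer pair starts in .tvx; headerLength stands in
        // for Lucene40TermVectorsReader.HEADER_LENGTH_INDEX.
        static long EntryPosition(int docID, long headerLength)
            => headerLength + (long)docID * BytesPerDoc;

        static void Main()
        {
            // With an illustrative 12-byte header, doc 3's entry starts at 60.
            System.Console.WriteLine(EntryPosition(3, 12));
        }
    }
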
-
-        private long[] fps = new long[10]; // pointers to the tvf before writing each field
-        private int fieldCount = 0; // number of fields we have written so far for this document
-        private int numVectorFields = 0; // total number of fields we will write for this document
-        private string lastFieldName;
-
-        public override void StartField(FieldInfo info, int numTerms, bool positions, bool offsets, bool payloads)
-        {
-            Debug.Assert(lastFieldName == null || info.Name.CompareToOrdinal(lastFieldName) > 0, "fieldName=" + info.Name + " lastFieldName=" + lastFieldName);
-            lastFieldName = info.Name;
-            this.positions = positions;
-            this.offsets = offsets;
-            this.payloads = payloads;
-            lastTerm.Length = 0;
-            lastPayloadLength = -1; // force first payload to write its length
-            fps[fieldCount++] = tvf.GetFilePointer();
-            tvd.WriteVInt32(info.Number);
-            tvf.WriteVInt32(numTerms);
-            sbyte bits = 0x0;
-            if (positions)
-            {
-                bits |= Lucene40TermVectorsReader.STORE_POSITIONS_WITH_TERMVECTOR;
-            }
-            if (offsets)
-            {
-                bits |= Lucene40TermVectorsReader.STORE_OFFSET_WITH_TERMVECTOR;
-            }
-            if (payloads)
-            {
-                bits |= Lucene40TermVectorsReader.STORE_PAYLOAD_WITH_TERMVECTOR;
-            }
-            tvf.WriteByte((byte)bits);
-        }
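
The flags byte written at the end of StartField packs the three storage options into independent bits, so the reader can test each one in isolation. A small sketch with illustrative constants (the canonical values live in Lucene40TermVectorsReader):

    static class TvfFlagsDemo
    {
        // Illustrative values; the canonical constants are defined in
        // Lucene40TermVectorsReader.
        const byte StorePositions = 0x1;
        const byte StoreOffsets = 0x2;
        const byte StorePayloads = 0x4;

        static void Main()
        {
            byte bits = 0;
            bits |= StorePositions;
            bits |= StorePayloads;

            // Each option decodes independently on read.
            System.Console.WriteLine((bits & StoreOffsets) != 0);   // False
            System.Console.WriteLine((bits & StorePayloads) != 0);  // True
        }
    }
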
-
-        public override void FinishDocument()
-        {
-            Debug.Assert(fieldCount == numVectorFields);
-            for (int i = 1; i < fieldCount; i++)
-            {
-                tvd.WriteVInt64(fps[i] - fps[i - 1]);
-            }
-        }
-
-        private readonly BytesRef lastTerm = new BytesRef(10);
-
-        // NOTE: we override AddProx, so we don't need to buffer when indexing.
-        // we also don't buffer during bulk merges.
-        private int[] offsetStartBuffer = new int[10];
-
-        private int[] offsetEndBuffer = new int[10];
-        private BytesRef payloadData = new BytesRef(10);
-        private int bufferedIndex = 0;
-        private int bufferedFreq = 0;
-        private bool positions = false;
-        private bool offsets = false;
-        private bool payloads = false;
-
-        public override void StartTerm(BytesRef term, int freq)
-        {
-            int prefix = StringHelper.BytesDifference(lastTerm, term);
-            int suffix = term.Length - prefix;
-            tvf.WriteVInt32(prefix);
-            tvf.WriteVInt32(suffix);
-            tvf.WriteBytes(term.Bytes, term.Offset + prefix, suffix);
-            tvf.WriteVInt32(freq);
-            lastTerm.CopyBytes(term);
-            lastPosition = lastOffset = 0;
-
-            if (offsets && positions)
-            {
-                // we might need to buffer if it's a non-bulk merge
-                offsetStartBuffer = ArrayUtil.Grow(offsetStartBuffer, freq);
-                offsetEndBuffer = ArrayUtil.Grow(offsetEndBuffer, freq);
-            }
-            bufferedIndex = 0;
-            bufferedFreq = freq;
-            payloadData.Length = 0;
-        }
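
Because terms arrive in sorted order, StartTerm writes only the length of the byte prefix shared with the previous term, the suffix length, and the suffix bytes themselves. A self-contained sketch of that prefix-delta scheme over raw byte arrays (CommonPrefix below is a stand-in for StringHelper.BytesDifference):

    using System;
    using System.Text;

    static class PrefixDeltaDemo
    {
        // Length of the common byte prefix of a and b; a stand-in for
        // StringHelper.BytesDifference over BytesRefs.
        static int CommonPrefix(byte[] a, byte[] b)
        {
            int n = Math.Min(a.Length, b.Length), i = 0;
            while (i < n && a[i] == b[i]) i++;
            return i;
        }

        static void Main()
        {
            byte[] last = Encoding.UTF8.GetBytes("search");
            byte[] term = Encoding.UTF8.GetBytes("seattle");
            int prefix = CommonPrefix(last, term);  // 3 ("sea")
            int suffix = term.Length - prefix;      // 4 ("ttle")
            // On disk: vint(prefix), vint(suffix), then only the suffix bytes.
            Console.WriteLine($"prefix={prefix} suffix={suffix}");
        }
    }
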
-
-        internal int lastPosition = 0;
-        internal int lastOffset = 0;
-        internal int lastPayloadLength = -1; // force first payload to write its length
-
-        internal BytesRef scratch = new BytesRef(); // used only by this optimized flush below
-
-        public override void AddProx(int numProx, DataInput positions, DataInput offsets)
-        {
-            if (payloads)
-            {
-                // TODO: maybe this is overkill and we should just call base.AddProx() in this case?
-                // we do avoid buffering the offsets in RAM though.
-                for (int i = 0; i < numProx; i++)
-                {
-                    int code = positions.ReadVInt32();
-                    if ((code & 1) == 1)
-                    {
-                        int length = positions.ReadVInt32();
-                        scratch.Grow(length);
-                        scratch.Length = length;
-                        positions.ReadBytes(scratch.Bytes, scratch.Offset, scratch.Length);
-                        WritePosition((int)((uint)code >> 1), scratch);
-                    }
-                    else
-                    {
-                        WritePosition((int)((uint)code >> 1), null);
-                    }
-                }
-                tvf.WriteBytes(payloadData.Bytes, payloadData.Offset, payloadData.Length);
-            }
-            else if (positions != null)
-            {
-                // pure positions, no payloads
-                for (int i = 0; i < numProx; i++)
-                {
-                    tvf.WriteVInt32((int)((uint)positions.ReadVInt32() >> 1));
-                }
-            }
-
-            if (offsets != null)
-            {
-                for (int i = 0; i < numProx; i++)
-                {
-                    tvf.WriteVInt32(offsets.ReadVInt32());
-                    tvf.WriteVInt32(offsets.ReadVInt32());
-                }
-            }
-        }
-
-        public override void AddPosition(int position, int startOffset, int endOffset, BytesRef payload)
-        {
-            if (positions && (offsets || payloads))
-            {
-                // write position delta
-                WritePosition(position - lastPosition, payload);
-                lastPosition = position;
-
-                // buffer offsets
-                if (offsets)
-                {
-                    offsetStartBuffer[bufferedIndex] = startOffset;
-                    offsetEndBuffer[bufferedIndex] = endOffset;
-                }
-
-                bufferedIndex++;
-            }
-            else if (positions)
-            {
-                // write position delta
-                WritePosition(position - lastPosition, payload);
-                lastPosition = position;
-            }
-            else if (offsets)
-            {
-                // write offset deltas
-                tvf.WriteVInt32(startOffset - lastOffset);
-                tvf.WriteVInt32(endOffset - startOffset);
-                lastOffset = endOffset;
-            }
-        }
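
In the offsets-only branch, each position costs two vints: the gap from the previous token's end offset to this start offset, and the token length. A short round-trip sketch of that encoding (vint serialization itself is elided):

    using System;
    using System.Collections.Generic;

    static class OffsetDeltaDemo
    {
        static void Main()
        {
            var spans = new[] { (0, 5), (6, 11), (12, 20) };  // (start, end) offsets

            // Encode: gap from the previous end offset, then token length.
            var encoded = new List<int>();
            int lastOffset = 0;
            foreach (var (start, end) in spans)
            {
                encoded.Add(start - lastOffset);
                encoded.Add(end - start);
                lastOffset = end;
            }

            // Decode symmetrically.
            lastOffset = 0;
            for (int i = 0; i < encoded.Count; i += 2)
            {
                int start = lastOffset + encoded[i];
                int end = start + encoded[i + 1];
                Console.WriteLine($"({start}, {end})");  // round-trips the spans
                lastOffset = end;
            }
        }
    }
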
-
-        public override void FinishTerm()
-        {
-            if (bufferedIndex > 0)
-            {
-                // dump buffer
-                Debug.Assert(positions && (offsets || payloads));
-                Debug.Assert(bufferedIndex == bufferedFreq);
-                if (payloads)
-                {
-                    tvf.WriteBytes(payloadData.Bytes, payloadData.Offset, payloadData.Length);
-                }
-                if (offsets)
-                {
-                    for (int i = 0; i < bufferedIndex; i++)
-                    {
-                        tvf.WriteVInt32(offsetStartBuffer[i] - lastOffset);
-                        tvf.WriteVInt32(offsetEndBuffer[i] - offsetStartBuffer[i]);
-                        lastOffset = offsetEndBuffer[i];
-                    }
-                }
-            }
-        }
-
-        private void WritePosition(int delta, BytesRef payload)
-        {
-            if (payloads)
-            {
-                int payloadLength = payload == null ? 0 : payload.Length;
-
-                if (payloadLength != lastPayloadLength)
-                {
-                    lastPayloadLength = payloadLength;
-                    tvf.WriteVInt32((delta << 1) | 1);
-                    tvf.WriteVInt32(payloadLength);
-                }
-                else
-                {
-                    tvf.WriteVInt32(delta << 1);
-                }
-                if (payloadLength > 0)
-                {
-                    if (payloadLength + payloadData.Length < 0)
-                    {
-                        // we overflowed the payload buffer: just throw NotSupportedException, since
-                        // having more than System.Int32.MaxValue bytes of payload for a single term in a single doc is unreasonable.
-                        throw new System.NotSupportedException("A term cannot have more than System.Int32.MaxValue bytes of payload data in a single document");
-                    }
-                    payloadData.Append(payload);
-                }
-            }
-            else
-            {
-                tvf.WriteVInt32(delta);
-            }
-        }
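
The low bit of the shifted position delta is a "payload length changed" flag, so runs of equal-length payloads pay no per-position length overhead. A minimal encode/decode sketch of that packing (again eliding vint serialization):

    using System;
    using System.Collections.Generic;

    static class PositionCodeDemo
    {
        static void Main()
        {
            var output = new List<int>();  // stands in for the vint stream
            int lastPayloadLength = -1;    // forces the first length to be written

            // Encode: low bit set means a new payload length follows.
            void Write(int delta, int payloadLength)
            {
                if (payloadLength != lastPayloadLength)
                {
                    lastPayloadLength = payloadLength;
                    output.Add((delta << 1) | 1);
                    output.Add(payloadLength);
                }
                else
                {
                    output.Add(delta << 1);
                }
            }

            Write(3, 2); Write(5, 2); Write(1, 0);

            // Decode mirrors the reader: shift off the flag bit.
            int i = 0, length = 0;
            while (i < output.Count)
            {
                int code = output[i++];
                if ((code & 1) == 1) length = output[i++];
                Console.WriteLine($"delta={(int)((uint)code >> 1)} payloadLen={length}");
            }
        }
    }
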
-
-        public override void Abort()
-        {
-            try
-            {
-                Dispose();
-            }
-#pragma warning disable 168
-            catch (Exception ignored)
-#pragma warning restore 168
-            {
-            }
-            IOUtils.DeleteFilesIgnoringExceptions(directory, 
-                IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION), 
-                IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), 
-                IndexFileNames.SegmentFileName(segment, "", Lucene40TermVectorsReader.VECTORS_FIELDS_EXTENSION));
-        }
-
-        /// <summary>
-        /// Do a bulk copy of numDocs documents from reader to our
-        /// streams. This is used to expedite merging when the
-        /// field numbers are congruent.
-        /// </summary>
-        private void AddRawDocuments(Lucene40TermVectorsReader reader, int[] tvdLengths, int[] tvfLengths, int numDocs)
-        {
-            long tvdPosition = tvd.GetFilePointer();
-            long tvfPosition = tvf.GetFilePointer();
-            long tvdStart = tvdPosition;
-            long tvfStart = tvfPosition;
-            for (int i = 0; i < numDocs; i++)
-            {
-                tvx.WriteInt64(tvdPosition);
-                tvdPosition += tvdLengths[i];
-                tvx.WriteInt64(tvfPosition);
-                tvfPosition += tvfLengths[i];
-            }
-            tvd.CopyBytes(reader.TvdStream, tvdPosition - tvdStart);
-            tvf.CopyBytes(reader.TvfStream, tvfPosition - tvfStart);
-            Debug.Assert(tvd.GetFilePointer() == tvdPosition);
-            Debug.Assert(tvf.GetFilePointer() == tvfPosition);
-        }
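
The loop above regenerates the .tvx pointer pairs as running prefix sums of the source segment's per-document lengths, while the .tvd/.tvf payload bytes are copied verbatim. The pointer arithmetic in isolation (starting positions are illustrative):

    static class RawCopyDemo
    {
        static void Main()
        {
            long tvdPos = 100, tvfPos = 200;    // illustrative starting positions
            int[] tvdLengths = { 10, 20, 30 };  // per-doc byte lengths from the source
            int[] tvfLengths = { 5, 15, 25 };

            // The new pointers are running prefix sums over the lengths.
            for (int i = 0; i < tvdLengths.Length; i++)
            {
                System.Console.WriteLine($"doc {i}: tvd={tvdPos} tvf={tvfPos}");
                tvdPos += tvdLengths[i];
                tvfPos += tvfLengths[i];
            }
            // Afterwards, tvdPos - 100 == 60 and tvfPos - 200 == 45 bytes
            // of .tvd/.tvf data are copied verbatim from the source streams.
        }
    }
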
-
-        public override int Merge(MergeState mergeState)
-        {
-            // Used for bulk-reading raw bytes for term vectors
-            int[] rawDocLengths = new int[MAX_RAW_MERGE_DOCS];
-            int[] rawDocLengths2 = new int[MAX_RAW_MERGE_DOCS];
-
-            int idx = 0;
-            int numDocs = 0;
-            for (int i = 0; i < mergeState.Readers.Count; i++)
-            {
-                AtomicReader reader = mergeState.Readers[i];
-
-                SegmentReader matchingSegmentReader = mergeState.MatchingSegmentReaders[idx++];
-                Lucene40TermVectorsReader matchingVectorsReader = null;
-                if (matchingSegmentReader != null)
-                {
-                    TermVectorsReader vectorsReader = matchingSegmentReader.TermVectorsReader;
-
-                    if (vectorsReader != null && vectorsReader is Lucene40TermVectorsReader)
-                    {
-                        matchingVectorsReader = (Lucene40TermVectorsReader)vectorsReader;
-                    }
-                }
-                if (reader.LiveDocs != null)
-                {
-                    numDocs += CopyVectorsWithDeletions(mergeState, matchingVectorsReader, reader, rawDocLengths, rawDocLengths2);
-                }
-                else
-                {
-                    numDocs += CopyVectorsNoDeletions(mergeState, matchingVectorsReader, reader, rawDocLengths, rawDocLengths2);
-                }
-            }
-            Finish(mergeState.FieldInfos, numDocs);
-            return numDocs;
-        }
-
-        /// <summary>
-        /// Maximum number of contiguous documents to bulk-copy
-        /// when merging term vectors.
-        /// </summary>
-        private const int MAX_RAW_MERGE_DOCS = 4192;
-
-        private int CopyVectorsWithDeletions(MergeState mergeState, Lucene40TermVectorsReader matchingVectorsReader, AtomicReader reader, int[] rawDocLengths, int[] rawDocLengths2)
-        {
-            int maxDoc = reader.MaxDoc;
-            IBits liveDocs = reader.LiveDocs;
-            int totalNumDocs = 0;
-            if (matchingVectorsReader != null)
-            {
-                // We can bulk-copy because the fieldInfos are "congruent"
-                for (int docNum = 0; docNum < maxDoc; )
-                {
-                    if (!liveDocs.Get(docNum))
-                    {
-                        // skip deleted docs
-                        ++docNum;
-                        continue;
-                    }
-                    // We can optimize this case (doing a bulk byte copy) since the field
-                    // numbers are identical
-                    int start = docNum, numDocs = 0;
-                    do
-                    {
-                        docNum++;
-                        numDocs++;
-                        if (docNum >= maxDoc)
-                        {
-                            break;
-                        }
-                        if (!liveDocs.Get(docNum))
-                        {
-                            docNum++;
-                            break;
-                        }
-                    } while (numDocs < MAX_RAW_MERGE_DOCS);
-
-                    matchingVectorsReader.RawDocs(rawDocLengths, rawDocLengths2, start, numDocs);
-                    AddRawDocuments(matchingVectorsReader, rawDocLengths, rawDocLengths2, numDocs);
-                    totalNumDocs += numDocs;
-                    mergeState.CheckAbort.Work(300 * numDocs);
-                }
-            }
-            else
-            {
-                for (int docNum = 0; docNum < maxDoc; docNum++)
-                {
-                    if (!liveDocs.Get(docNum))
-                    {
-                        // skip deleted docs
-                        continue;
-                    }
-
-                    // NOTE: it's very important to first assign to vectors then pass it to
-                    // termVectorsWriter.AddAllDocVectors; see LUCENE-1282
-                    Fields vectors = reader.GetTermVectors(docNum);
-                    AddAllDocVectors(vectors, mergeState);
-                    totalNumDocs++;
-                    mergeState.CheckAbort.Work(300);
-                }
-            }
-            return totalNumDocs;
-        }
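
The bulk path above batches contiguous runs of live documents, capped at MAX_RAW_MERGE_DOCS, into single raw copies. An isolated sketch of that run detection, using a bool[] as a stand-in for IBits:

    using System;

    static class LiveRunDemo
    {
        const int MaxRun = 4;  // stand-in for MAX_RAW_MERGE_DOCS

        static void Main()
        {
            bool[] live = { true, true, false, true, true, true, true, true };
            for (int docNum = 0; docNum < live.Length; )
            {
                if (!live[docNum]) { docNum++; continue; }  // skip deleted docs
                int start = docNum, numDocs = 0;
                do
                {
                    docNum++;
                    numDocs++;
                    if (docNum >= live.Length) break;
                    if (!live[docNum]) { docNum++; break; }
                } while (numDocs < MaxRun);
                Console.WriteLine($"bulk copy docs [{start}, {start + numDocs})");
                // Prints [0, 2), [3, 7), [7, 8): live runs capped at MaxRun.
            }
        }
    }
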
-
-        private int CopyVectorsNoDeletions(MergeState mergeState, Lucene40TermVectorsReader matchingVectorsReader, AtomicReader reader, int[] rawDocLengths, int[] rawDocLengths2)
-        {
-            int maxDoc = reader.MaxDoc;
-            if (matchingVectorsReader != null)
-            {
-                // We can bulk-copy because the fieldInfos are "congruent"
-                int docCount = 0;
-                while (docCount < maxDoc)
-                {
-                    int len = Math.Min(MAX_RAW_MERGE_DOCS, maxDoc - docCount);
-                    matchingVectorsReader.RawDocs(rawDocLengths, rawDocLengths2, docCount, len);
-                    AddRawDocuments(matchingVectorsReader, rawDocLengths, rawDocLengths2, len);
-                    docCount += len;
-                    mergeState.CheckAbort.Work(300 * len);
-                }
-            }
-            else
-            {
-                for (int docNum = 0; docNum < maxDoc; docNum++)
-                {
-                    // NOTE: it's very important to first assign to vectors then pass it to
-                    // termVectorsWriter.AddAllDocVectors; see LUCENE-1282
-                    Fields vectors = reader.GetTermVectors(docNum);
-                    AddAllDocVectors(vectors, mergeState);
-                    mergeState.CheckAbort.Work(300);
-                }
-            }
-            return maxDoc;
-        }
-
-        public override void Finish(FieldInfos fis, int numDocs)
-        {
-            // This is most likely a bug in Sun JRE 1.6.0_04/_05;
-            // we detect that the bug has struck here and throw an
-            // exception to prevent the corruption from entering the
-            // index. See LUCENE-1282 for details.
-            if (Lucene40TermVectorsReader.HEADER_LENGTH_INDEX + ((long)numDocs) * 16 != tvx.GetFilePointer())
-            {
-                throw new Exception("tvx size mismatch: mergedDocs is " + numDocs + " but tvx size is " + tvx.GetFilePointer() + " file=" + tvx.ToString() + "; now aborting this merge to prevent index corruption");
-            }
-        }
-
-        /// <summary>
-        /// Close all streams. </summary>
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                // make an effort to close all streams we can but remember and re-throw
-                // the first exception encountered in this process
-                IOUtils.Close(tvx, tvd, tvf);
-                tvx = tvd = tvf = null;
-            }
-        }
-
-        public override IComparer<BytesRef> Comparer
-        {
-            get
-            {
-                return BytesRef.UTF8SortedAsUnicodeComparer;
-            }
-        }
-    }
-}
\ No newline at end of file

