lucenenet-commits mailing list archives

From nightowl...@apache.org
Subject [32/62] [abbrv] [partial] lucenenet git commit: Renamed Lucene.Net.Core folder Lucene.Net because the dotnet.exe pack command doesn't allow creating a NuGet package with a different name than its folder. Working around it with the script was much more co
Date Tue, 04 Apr 2017 17:19:38 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42DocValuesProducer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42DocValuesProducer.cs b/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42DocValuesProducer.cs
deleted file mode 100644
index 4503588..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42DocValuesProducer.cs
+++ /dev/null
@@ -1,837 +0,0 @@
-using Lucene.Net.Index;
-using Lucene.Net.Store;
-using Lucene.Net.Support;
-using Lucene.Net.Util.Fst;
-using System;
-using System.Collections.Generic;
-
-namespace Lucene.Net.Codecs.Lucene42
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using BinaryDocValues = Lucene.Net.Index.BinaryDocValues;
-    using IBits = Lucene.Net.Util.IBits;
-    using BlockPackedReader = Lucene.Net.Util.Packed.BlockPackedReader;
-    using ByteArrayDataInput = Lucene.Net.Store.ByteArrayDataInput;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using ChecksumIndexInput = Lucene.Net.Store.ChecksumIndexInput;
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
-    using DocsEnum = Lucene.Net.Index.DocsEnum;
-    using DocValues = Lucene.Net.Index.DocValues;
-    using DocValuesType = Lucene.Net.Index.DocValuesType;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using Int32sRef = Lucene.Net.Util.Int32sRef;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using MonotonicBlockPackedReader = Lucene.Net.Util.Packed.MonotonicBlockPackedReader;
-    using NumericDocValues = Lucene.Net.Index.NumericDocValues;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-    using PagedBytes = Lucene.Net.Util.PagedBytes;
-    using PositiveInt32Outputs = Lucene.Net.Util.Fst.PositiveInt32Outputs;
-    using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
-    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
-    using SortedDocValues = Lucene.Net.Index.SortedDocValues;
-    using SortedSetDocValues = Lucene.Net.Index.SortedSetDocValues;
-    using TermsEnum = Lucene.Net.Index.TermsEnum;
-    using Util = Lucene.Net.Util.Fst.Util;
-
-    /// <summary>
-    /// Reader for <seealso cref="Lucene42DocValuesFormat"/>
-    /// </summary>
-    internal class Lucene42DocValuesProducer : DocValuesProducer
-    {
-        // metadata maps (just file pointers and minimal stuff)
-        private readonly IDictionary<int, NumericEntry> numerics;
-
-        private readonly IDictionary<int, BinaryEntry> binaries;
-        private readonly IDictionary<int, FSTEntry> fsts;
-        private readonly IndexInput data;
-        private readonly int version;
-
-        // ram instances we have already loaded
-        private readonly IDictionary<int, NumericDocValues> numericInstances = new Dictionary<int, NumericDocValues>();
-
-        private readonly IDictionary<int, BinaryDocValues> binaryInstances = new Dictionary<int, BinaryDocValues>();
-        private readonly IDictionary<int, FST<long?>> fstInstances = new Dictionary<int, FST<long?>>();
-
-        private readonly int maxDoc;
-        private readonly AtomicInt64 ramBytesUsed;
-
-        internal const sbyte NUMBER = 0;
-        internal const sbyte BYTES = 1;
-        internal const sbyte FST = 2;
-
-        internal const int BLOCK_SIZE = 4096;
-
-        internal const sbyte DELTA_COMPRESSED = 0;
-        internal const sbyte TABLE_COMPRESSED = 1;
-        internal const sbyte UNCOMPRESSED = 2;
-        internal const sbyte GCD_COMPRESSED = 3;
-
-        internal const int VERSION_START = 0;
-        internal const int VERSION_GCD_COMPRESSION = 1;
-        internal const int VERSION_CHECKSUM = 2;
-        internal const int VERSION_CURRENT = VERSION_CHECKSUM;
-
-        internal Lucene42DocValuesProducer(SegmentReadState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension)
-        {
-            maxDoc = state.SegmentInfo.DocCount;
-            string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
-            // read in the entries from the metadata file.
-            ChecksumIndexInput @in = state.Directory.OpenChecksumInput(metaName, state.Context);
-            bool success = false;
-            ramBytesUsed = new AtomicInt64(RamUsageEstimator.ShallowSizeOfInstance(this.GetType()));
-            try
-            {
-                version = CodecUtil.CheckHeader(@in, metaCodec, VERSION_START, VERSION_CURRENT);
-                numerics = new Dictionary<int, NumericEntry>();
-                binaries = new Dictionary<int, BinaryEntry>();
-                fsts = new Dictionary<int, FSTEntry>();
-                ReadFields(@in, state.FieldInfos);
-
-                if (version >= VERSION_CHECKSUM)
-                {
-                    CodecUtil.CheckFooter(@in);
-                }
-                else
-                {
-#pragma warning disable 612, 618
-                    CodecUtil.CheckEOF(@in);
-#pragma warning restore 612, 618
-                }
-
-                success = true;
-            }
-            finally
-            {
-                if (success)
-                {
-                    IOUtils.Close(@in);
-                }
-                else
-                {
-                    IOUtils.CloseWhileHandlingException(@in);
-                }
-            }
-
-            success = false;
-            try
-            {
-                string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
-                data = state.Directory.OpenInput(dataName, state.Context);
-                int version2 = CodecUtil.CheckHeader(data, dataCodec, VERSION_START, VERSION_CURRENT);
-                if (version != version2)
-                {
-                    throw new CorruptIndexException("Format versions mismatch");
-                }
-
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(this.data);
-                }
-            }
-        }
-
-        private void ReadFields(IndexInput meta, FieldInfos infos)
-        {
-            int fieldNumber = meta.ReadVInt32();
-            while (fieldNumber != -1)
-            {
-                // check should be: infos.fieldInfo(fieldNumber) != null, which incorporates negative check
-                // but docvalues updates are currently buggy here (loading extra stuff, etc): LUCENE-5616
-                if (fieldNumber < 0)
-                {
-                    // trickier to validate more: because we re-use for norms, because we use multiple entries
-                    // for "composite" types like sortedset, etc.
-                    throw new CorruptIndexException("Invalid field number: " + fieldNumber + ", input=" + meta);
-                }
-                int fieldType = meta.ReadByte();
-                if (fieldType == NUMBER)
-                {
-                    var entry = new NumericEntry();
-                    entry.Offset = meta.ReadInt64();
-                    entry.Format = (sbyte)meta.ReadByte();
-                    switch (entry.Format)
-                    {
-                        case DELTA_COMPRESSED:
-                        case TABLE_COMPRESSED:
-                        case GCD_COMPRESSED:
-                        case UNCOMPRESSED:
-                            break;
-
-                        default:
-                            throw new CorruptIndexException("Unknown format: " + entry.Format + ", input=" + meta);
-                    }
-                    if (entry.Format != UNCOMPRESSED)
-                    {
-                        entry.PackedInt32sVersion = meta.ReadVInt32();
-                    }
-                    numerics[fieldNumber] = entry;
-                }
-                else if (fieldType == BYTES)
-                {
-                    BinaryEntry entry = new BinaryEntry();
-                    entry.Offset = meta.ReadInt64();
-                    entry.NumBytes = meta.ReadInt64();
-                    entry.MinLength = meta.ReadVInt32();
-                    entry.MaxLength = meta.ReadVInt32();
-                    if (entry.MinLength != entry.MaxLength)
-                    {
-                        entry.PackedInt32sVersion = meta.ReadVInt32();
-                        entry.BlockSize = meta.ReadVInt32();
-                    }
-                    binaries[fieldNumber] = entry;
-                }
-                else if (fieldType == FST)
-                {
-                    FSTEntry entry = new FSTEntry();
-                    entry.Offset = meta.ReadInt64();
-                    entry.NumOrds = meta.ReadVInt64();
-                    fsts[fieldNumber] = entry;
-                }
-                else
-                {
-                    throw new CorruptIndexException("invalid entry type: " + fieldType + ", input=" + meta);
-                }
-                fieldNumber = meta.ReadVInt32();
-            }
-        }
-
-        public override NumericDocValues GetNumeric(FieldInfo field)
-        {
-            lock (this)
-            {
-                NumericDocValues instance;
-                if (!numericInstances.TryGetValue(field.Number, out instance) || instance == null)
-                {
-                    instance = LoadNumeric(field);
-                    numericInstances[field.Number] = instance;
-                }
-                return instance;
-            }
-        }
-
-        public override long RamBytesUsed()
-        {
-            return ramBytesUsed.Get();
-        }
-
-        public override void CheckIntegrity()
-        {
-            if (version >= VERSION_CHECKSUM)
-            {
-                CodecUtil.ChecksumEntireFile(data);
-            }
-        }
-
-        private NumericDocValues LoadNumeric(FieldInfo field)
-        {
-            NumericEntry entry = numerics[field.Number];
-            data.Seek(entry.Offset);
-            switch (entry.Format)
-            {
-                case TABLE_COMPRESSED:
-                    int size = data.ReadVInt32();
-                    if (size > 256)
-                    {
-                        throw new CorruptIndexException("TABLE_COMPRESSED cannot have more than 256 distinct values, input=" + data);
-                    }
-                    var decode = new long[size];
-                    for (int i = 0; i < decode.Length; i++)
-                    {
-                        decode[i] = data.ReadInt64();
-                    }
-                    int formatID = data.ReadVInt32();
-                    int bitsPerValue = data.ReadVInt32();
-                    PackedInt32s.Reader ordsReader = PackedInt32s.GetReaderNoHeader(data, PackedInt32s.Format.ById(formatID), entry.PackedInt32sVersion, maxDoc, bitsPerValue);
-                    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(decode) + ordsReader.RamBytesUsed());
-                    return new NumericDocValuesAnonymousInnerClassHelper(decode, ordsReader);
-
-                case DELTA_COMPRESSED:
-                    int blockSize = data.ReadVInt32();
-                    var reader = new BlockPackedReader(data, entry.PackedInt32sVersion, blockSize, maxDoc, false);
-                    ramBytesUsed.AddAndGet(reader.RamBytesUsed());
-                    return reader;
-
-                case UNCOMPRESSED:
-                    byte[] bytes = new byte[maxDoc];
-                    data.ReadBytes(bytes, 0, bytes.Length);
-                    ramBytesUsed.AddAndGet(RamUsageEstimator.SizeOf(bytes));
-                    return new NumericDocValuesAnonymousInnerClassHelper2(this, bytes);
-
-                case GCD_COMPRESSED:
-                    long min = data.ReadInt64();
-                    long mult = data.ReadInt64();
-                    int quotientBlockSize = data.ReadVInt32();
-                    BlockPackedReader quotientReader = new BlockPackedReader(data, entry.PackedInt32sVersion, quotientBlockSize, maxDoc, false);
-                    ramBytesUsed.AddAndGet(quotientReader.RamBytesUsed());
-                    return new NumericDocValuesAnonymousInnerClassHelper3(min, mult, quotientReader);
-
-                default:
-                    throw new InvalidOperationException();
-            }
-        }
-
-        private class NumericDocValuesAnonymousInnerClassHelper : NumericDocValues
-        {
-            private readonly long[] decode;
-            private readonly PackedInt32s.Reader ordsReader;
-
-            public NumericDocValuesAnonymousInnerClassHelper(long[] decode, PackedInt32s.Reader ordsReader)
-            {
-                this.decode = decode;
-                this.ordsReader = ordsReader;
-            }
-
-            public override long Get(int docID)
-            {
-                return decode[(int)ordsReader.Get(docID)];
-            }
-        }
-
-        private class NumericDocValuesAnonymousInnerClassHelper2 : NumericDocValues
-        {
-            private readonly byte[] bytes;
-
-            public NumericDocValuesAnonymousInnerClassHelper2(Lucene42DocValuesProducer outerInstance, byte[] bytes)
-            {
-                this.bytes = bytes;
-            }
-
-            public override long Get(int docID)
-            {
-                return (sbyte)bytes[docID];
-            }
-        }
-
-        private class NumericDocValuesAnonymousInnerClassHelper3 : NumericDocValues
-        {
-            private readonly long min;
-            private readonly long mult;
-            private readonly BlockPackedReader quotientReader;
-
-            public NumericDocValuesAnonymousInnerClassHelper3(long min, long mult, BlockPackedReader quotientReader)
-            {
-                this.min = min;
-                this.mult = mult;
-                this.quotientReader = quotientReader;
-            }
-
-            public override long Get(int docID)
-            {
-                return min + mult * quotientReader.Get(docID);
-            }
-        }
-
-        public override BinaryDocValues GetBinary(FieldInfo field)
-        {
-            lock (this)
-            {
-                BinaryDocValues instance;
-                if (!binaryInstances.TryGetValue(field.Number, out instance) || instance == null)
-                {
-                    instance = LoadBinary(field);
-                    binaryInstances[field.Number] = instance;
-                }
-                return instance;
-            }
-        }
-
-        private BinaryDocValues LoadBinary(FieldInfo field)
-        {
-            BinaryEntry entry = binaries[field.Number];
-            data.Seek(entry.Offset);
-            PagedBytes bytes = new PagedBytes(16);
-            bytes.Copy(data, entry.NumBytes);
-            PagedBytes.Reader bytesReader = bytes.Freeze(true);
-            if (entry.MinLength == entry.MaxLength)
-            {
-                int fixedLength = entry.MinLength;
-                ramBytesUsed.AddAndGet(bytes.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper(bytesReader, fixedLength);
-            }
-            else
-            {
-                MonotonicBlockPackedReader addresses = new MonotonicBlockPackedReader(data, entry.PackedInt32sVersion, entry.BlockSize, maxDoc, false);
-                ramBytesUsed.AddAndGet(bytes.RamBytesUsed() + addresses.RamBytesUsed());
-                return new BinaryDocValuesAnonymousInnerClassHelper2(bytesReader, addresses);
-            }
-        }
-
-        private class BinaryDocValuesAnonymousInnerClassHelper : BinaryDocValues
-        {
-            private readonly PagedBytes.Reader bytesReader;
-            private readonly int fixedLength;
-
-            public BinaryDocValuesAnonymousInnerClassHelper(PagedBytes.Reader bytesReader, int fixedLength)
-            {
-                this.bytesReader = bytesReader;
-                this.fixedLength = fixedLength;
-            }
-
-            public override void Get(int docID, BytesRef result)
-            {
-                bytesReader.FillSlice(result, fixedLength * (long)docID, fixedLength);
-            }
-        }
-
-        private class BinaryDocValuesAnonymousInnerClassHelper2 : BinaryDocValues
-        {
-            private readonly PagedBytes.Reader bytesReader;
-            private readonly MonotonicBlockPackedReader addresses;
-
-            public BinaryDocValuesAnonymousInnerClassHelper2(PagedBytes.Reader bytesReader, MonotonicBlockPackedReader addresses)
-            {
-                this.bytesReader = bytesReader;
-                this.addresses = addresses;
-            }
-
-            public override void Get(int docID, BytesRef result)
-            {
-                long startAddress = docID == 0 ? 0 : addresses.Get(docID - 1);
-                long endAddress = addresses.Get(docID);
-                bytesReader.FillSlice(result, startAddress, (int)(endAddress - startAddress));
-            }
-        }
-
-        public override SortedDocValues GetSorted(FieldInfo field)
-        {
-            FSTEntry entry = fsts[field.Number];
-            FST<long?> instance;
-            lock (this)
-            {
-                if (!fstInstances.TryGetValue(field.Number, out instance) || instance == null)
-                {
-                    data.Seek(entry.Offset);
-                    instance = new FST<long?>(data, PositiveInt32Outputs.Singleton);
-                    ramBytesUsed.AddAndGet(instance.GetSizeInBytes());
-                    fstInstances[field.Number] = instance;
-                }
-            }
-            var docToOrd = GetNumeric(field);
-            var fst = instance;
-
-            // per-thread resources
-            var @in = fst.GetBytesReader();
-            var firstArc = new FST.Arc<long?>();
-            var scratchArc = new FST.Arc<long?>();
-            var scratchInts = new Int32sRef();
-            var fstEnum = new BytesRefFSTEnum<long?>(fst);
-
-            return new SortedDocValuesAnonymousInnerClassHelper(entry, docToOrd, fst, @in, firstArc, scratchArc, scratchInts, fstEnum);
-        }
-
-        private class SortedDocValuesAnonymousInnerClassHelper : SortedDocValues
-        {
-            private readonly FSTEntry entry;
-            private readonly NumericDocValues docToOrd;
-            private readonly FST<long?> fst;
-            private readonly FST.BytesReader @in;
-            private readonly FST.Arc<long?> firstArc;
-            private readonly FST.Arc<long?> scratchArc;
-            private readonly Int32sRef scratchInts;
-            private readonly BytesRefFSTEnum<long?> fstEnum;
-
-            public SortedDocValuesAnonymousInnerClassHelper(FSTEntry entry, NumericDocValues docToOrd, FST<long?> fst, FST.BytesReader @in, FST.Arc<long?> firstArc, FST.Arc<long?> scratchArc, Int32sRef scratchInts, BytesRefFSTEnum<long?> fstEnum)
-            {
-                this.entry = entry;
-                this.docToOrd = docToOrd;
-                this.fst = fst;
-                this.@in = @in;
-                this.firstArc = firstArc;
-                this.scratchArc = scratchArc;
-                this.scratchInts = scratchInts;
-                this.fstEnum = fstEnum;
-            }
-
-            public override int GetOrd(int docID)
-            {
-                return (int)docToOrd.Get(docID);
-            }
-
-            public override void LookupOrd(int ord, BytesRef result)
-            {
-                try
-                {
-                    @in.Position = 0;
-                    fst.GetFirstArc(firstArc);
-                    Int32sRef output = Lucene.Net.Util.Fst.Util.GetByOutput(fst, ord, @in, firstArc, scratchArc, scratchInts);
-                    result.Bytes = new byte[output.Length];
-                    result.Offset = 0;
-                    result.Length = 0;
-                    Util.ToBytesRef(output, result);
-                }
-                catch (System.IO.IOException bogus)
-                {
-                    throw new Exception(bogus.ToString(), bogus);
-                }
-            }
-
-            public override int LookupTerm(BytesRef key)
-            {
-                try
-                {
-                    BytesRefFSTEnum.InputOutput<long?> o = fstEnum.SeekCeil(key);
-                    if (o == null)
-                    {
-                        return -ValueCount - 1;
-                    }
-                    else if (o.Input.Equals(key))
-                    {
-                        return (int)o.Output.GetValueOrDefault();
-                    }
-                    else
-                    {
-                        return (int)-o.Output.GetValueOrDefault() - 1;
-                    }
-                }
-                catch (System.IO.IOException bogus)
-                {
-                    throw new Exception(bogus.ToString(), bogus);
-                }
-            }
-
-            public override int ValueCount
-            {
-                get
-                {
-                    return (int)entry.NumOrds;
-                }
-            }
-
-            public override TermsEnum GetTermsEnum()
-            {
-                return new FSTTermsEnum(fst);
-            }
-        }
-
-        public override SortedSetDocValues GetSortedSet(FieldInfo field)
-        {
-            FSTEntry entry = fsts[field.Number];
-            if (entry.NumOrds == 0)
-            {
-                return DocValues.EMPTY_SORTED_SET; // empty FST!
-            }
-            FST<long?> instance;
-            lock (this)
-            {
-                if (!fstInstances.TryGetValue(field.Number, out instance) || instance == null)
-                {
-                    data.Seek(entry.Offset);
-                    instance = new FST<long?>(data, PositiveInt32Outputs.Singleton);
-                    ramBytesUsed.AddAndGet(instance.GetSizeInBytes());
-                    fstInstances[field.Number] = instance;
-                }
-            }
-            BinaryDocValues docToOrds = GetBinary(field);
-            FST<long?> fst = instance;
-
-            // per-thread resources
-            var @in = fst.GetBytesReader();
-            var firstArc = new FST.Arc<long?>();
-            var scratchArc = new FST.Arc<long?>();
-            var scratchInts = new Int32sRef();
-            var fstEnum = new BytesRefFSTEnum<long?>(fst);
-            var @ref = new BytesRef();
-            var input = new ByteArrayDataInput();
-            return new SortedSetDocValuesAnonymousInnerClassHelper(entry, docToOrds, fst, @in, firstArc, scratchArc, scratchInts, fstEnum, @ref, input);
-        }
-
-        private class SortedSetDocValuesAnonymousInnerClassHelper : SortedSetDocValues
-        {
-            private readonly FSTEntry entry;
-            private readonly BinaryDocValues docToOrds;
-            private readonly FST<long?> fst;
-            private readonly FST.BytesReader @in;
-            private readonly FST.Arc<long?> firstArc;
-            private readonly FST.Arc<long?> scratchArc;
-            private readonly Int32sRef scratchInts;
-            private readonly BytesRefFSTEnum<long?> fstEnum;
-            private readonly BytesRef @ref;
-            private readonly ByteArrayDataInput input;
-
-            public SortedSetDocValuesAnonymousInnerClassHelper(FSTEntry entry, BinaryDocValues docToOrds, FST<long?> fst, FST.BytesReader @in, FST.Arc<long?> firstArc, FST.Arc<long?> scratchArc, Int32sRef scratchInts, BytesRefFSTEnum<long?> fstEnum, BytesRef @ref, ByteArrayDataInput input)
-            {
-                this.entry = entry;
-                this.docToOrds = docToOrds;
-                this.fst = fst;
-                this.@in = @in;
-                this.firstArc = firstArc;
-                this.scratchArc = scratchArc;
-                this.scratchInts = scratchInts;
-                this.fstEnum = fstEnum;
-                this.@ref = @ref;
-                this.input = input;
-            }
-
-            private long currentOrd;
-
-            public override long NextOrd()
-            {
-                if (input.Eof)
-                {
-                    return NO_MORE_ORDS;
-                }
-                else
-                {
-                    currentOrd += input.ReadVInt64();
-                    return currentOrd;
-                }
-            }
-
-            public override void SetDocument(int docID)
-            {
-                docToOrds.Get(docID, @ref);
-                input.Reset(@ref.Bytes, @ref.Offset, @ref.Length);
-                currentOrd = 0;
-            }
-
-            public override void LookupOrd(long ord, BytesRef result)
-            {
-                try
-                {
-                    @in.Position = 0;
-                    fst.GetFirstArc(firstArc);
-                    Int32sRef output = Lucene.Net.Util.Fst.Util.GetByOutput(fst, ord, @in, firstArc, scratchArc, scratchInts);
-                    result.Bytes = new byte[output.Length];
-                    result.Offset = 0;
-                    result.Length = 0;
-                    Lucene.Net.Util.Fst.Util.ToBytesRef(output, result);
-                }
-                catch (System.IO.IOException bogus)
-                {
-                    throw new Exception(bogus.ToString(), bogus);
-                }
-            }
-
-            public override long LookupTerm(BytesRef key)
-            {
-                try
-                {
-                    var o = fstEnum.SeekCeil(key);
-                    if (o == null)
-                    {
-                        return -ValueCount - 1;
-                    }
-                    else if (o.Input.Equals(key))
-                    {
-                        return (int)o.Output.GetValueOrDefault();
-                    }
-                    else
-                    {
-                        return -o.Output.GetValueOrDefault() - 1;
-                    }
-                }
-                catch (System.IO.IOException bogus)
-                {
-                    throw new Exception(bogus.ToString(), bogus);
-                }
-            }
-
-            public override long ValueCount
-            {
-                get
-                {
-                    return entry.NumOrds;
-                }
-            }
-
-            public override TermsEnum GetTermsEnum()
-            {
-                return new FSTTermsEnum(fst);
-            }
-        }
-
-        public override IBits GetDocsWithField(FieldInfo field)
-        {
-            if (field.DocValuesType == DocValuesType.SORTED_SET)
-            {
-                return DocValues.DocsWithValue(GetSortedSet(field), maxDoc);
-            }
-            else
-            {
-                return new Lucene.Net.Util.Bits.MatchAllBits(maxDoc);
-            }
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                data.Dispose();
-            }
-        }
-
-        internal class NumericEntry
-        {
-            internal long Offset { get; set; }
-            internal sbyte Format { get; set; }
-
-            /// <summary>
-            /// NOTE: This was packedIntsVersion (field) in Lucene
-            /// </summary>
-            internal int PackedInt32sVersion { get; set; }
-        }
-
-        internal class BinaryEntry
-        {
-            internal long Offset { get; set; }
-            internal long NumBytes { get; set; }
-            internal int MinLength { get; set; }
-            internal int MaxLength { get; set; }
-
-            /// <summary>
-            /// NOTE: This was packedIntsVersion (field) in Lucene
-            /// </summary>
-            internal int PackedInt32sVersion { get; set; }
-            internal int BlockSize { get; set; }
-        }
-
-        internal class FSTEntry
-        {
-            internal long Offset { get; set; }
-            internal long NumOrds { get; set; }
-        }
-
-        // exposes FSTEnum directly as a TermsEnum: avoids binary-search next()
-        internal class FSTTermsEnum : TermsEnum
-        {
-            internal readonly BytesRefFSTEnum<long?> @in;
-
-            // this is all for the complicated seek(ord)...
-            // maybe we should add a FSTEnum that supports this operation?
-            internal readonly FST<long?> fst;
-
-            internal readonly FST.BytesReader bytesReader;
-            internal readonly FST.Arc<long?> firstArc = new FST.Arc<long?>();
-            internal readonly FST.Arc<long?> scratchArc = new FST.Arc<long?>();
-            internal readonly Int32sRef scratchInts = new Int32sRef();
-            internal readonly BytesRef scratchBytes = new BytesRef();
-
-            internal FSTTermsEnum(FST<long?> fst)
-            {
-                this.fst = fst;
-                @in = new BytesRefFSTEnum<long?>(fst);
-                bytesReader = fst.GetBytesReader();
-            }
-
-            public override BytesRef Next()
-            {
-                var io = @in.Next();
-                if (io == null)
-                {
-                    return null;
-                }
-                else
-                {
-                    return io.Input;
-                }
-            }
-
-            public override IComparer<BytesRef> Comparer
-            {
-                get
-                {
-                    return BytesRef.UTF8SortedAsUnicodeComparer;
-                }
-            }
-
-            public override SeekStatus SeekCeil(BytesRef text)
-            {
-                if (@in.SeekCeil(text) == null)
-                {
-                    return SeekStatus.END;
-                }
-                else if (Term.Equals(text))
-                {
-                    // TODO: add SeekStatus to FSTEnum like in https://issues.apache.org/jira/browse/LUCENE-3729
-                    // to remove this comparison?
-                    return SeekStatus.FOUND;
-                }
-                else
-                {
-                    return SeekStatus.NOT_FOUND;
-                }
-            }
-
-            public override bool SeekExact(BytesRef text)
-            {
-                if (@in.SeekExact(text) == null)
-                {
-                    return false;
-                }
-                else
-                {
-                    return true;
-                }
-            }
-
-            public override void SeekExact(long ord)
-            {
-                // TODO: would be better to make this simpler and faster.
-                // but we dont want to introduce a bug that corrupts our enum state!
-                bytesReader.Position = 0;
-                fst.GetFirstArc(firstArc);
-                Int32sRef output = Lucene.Net.Util.Fst.Util.GetByOutput(fst, ord, bytesReader, firstArc, scratchArc, scratchInts);
-                scratchBytes.Bytes = new byte[output.Length];
-                scratchBytes.Offset = 0;
-                scratchBytes.Length = 0;
-                Lucene.Net.Util.Fst.Util.ToBytesRef(output, scratchBytes);
-                // TODO: we could do this lazily, better to try to push into FSTEnum though?
-                @in.SeekExact(scratchBytes);
-            }
-
-            public override BytesRef Term
-            {
-                get { return @in.Current.Input; }
-            }
-
-            public override long Ord
-            {
-                get { return @in.Current.Output.GetValueOrDefault(); }
-            }
-
-            public override int DocFreq
-            {
-                get { throw new System.NotSupportedException(); }
-            }
-
-            public override long TotalTermFreq
-            {
-                get { throw new System.NotSupportedException(); }
-            }
-
-            public override DocsEnum Docs(IBits liveDocs, DocsEnum reuse, DocsFlags flags)
-            {
-                throw new System.NotSupportedException();
-            }
-
-            public override DocsAndPositionsEnum DocsAndPositions(IBits liveDocs, DocsAndPositionsEnum reuse, DocsAndPositionsFlags flags)
-            {
-                throw new System.NotSupportedException();
-            }
-        }
-    }
-}
\ No newline at end of file
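
For reference, all four numeric formats handled by LoadNumeric above decode to a plain long per document: UNCOMPRESSED stores one signed byte per document and DELTA_COMPRESSED returns the BlockPackedReader directly, while the other two wrap a packed reader. A minimal sketch of those two decodings, assuming the Lucene.Net.Util.Packed readers referenced in the file (the helper names are illustrative, not part of the codec):

    // Illustrative only: how the two wrapped numeric formats above map back to values.
    // TABLE_COMPRESSED: each document stores an ordinal into a decode table of at most 256 longs.
    internal static long GetTableCompressed(long[] decode, PackedInt32s.Reader ords, int docID)
    {
        return decode[(int)ords.Get(docID)];
    }

    // GCD_COMPRESSED: each document stores a quotient; the value is min + mult * quotient.
    internal static long GetGcdCompressed(long min, long mult, BlockPackedReader quotients, int docID)
    {
        return min + mult * quotients.Get(docID);
    }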

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosFormat.cs b/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosFormat.cs
deleted file mode 100644
index 8cc7e9b..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosFormat.cs
+++ /dev/null
@@ -1,130 +0,0 @@
-using System;
-
-namespace Lucene.Net.Codecs.Lucene42
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    // javadoc
-    // javadoc
-
-    /// <summary>
-    /// Lucene 4.2 Field Infos format.
-    /// <p>
-    /// <p>Field names are stored in the field info file, with suffix <tt>.fnm</tt>.</p>
-    /// <p>FieldInfos (.fnm) --&gt; Header,FieldsCount, &lt;FieldName,FieldNumber,
-    /// FieldBits,DocValuesBits,Attributes&gt; <sup>FieldsCount</sup></p>
-    /// <p>Data types:
-    /// <ul>
-    ///   <li>Header --&gt; <seealso cref="CodecUtil#checkHeader CodecHeader"/></li>
-    ///   <li>FieldsCount --&gt; <seealso cref="DataOutput#writeVInt VInt"/></li>
-    ///   <li>FieldName --&gt; <seealso cref="DataOutput#writeString String"/></li>
-    ///   <li>FieldBits, DocValuesBits --&gt; <seealso cref="DataOutput#writeByte Byte"/></li>
-    ///   <li>FieldNumber --&gt; <seealso cref="DataOutput#writeInt VInt"/></li>
-    ///   <li>Attributes --&gt; <seealso cref="DataOutput#writeStringStringMap Map&lt;String,String&gt;"/></li>
-    /// </ul>
-    /// </p>
-    /// Field Descriptions:
-    /// <ul>
-    ///   <li>FieldsCount: the number of fields in this file.</li>
-    ///   <li>FieldName: name of the field as a UTF-8 String.</li>
-    ///   <li>FieldNumber: the field's number. Note that unlike previous versions of
-    ///       Lucene, the fields are not numbered implicitly by their order in the
-    ///       file, instead explicitly.</li>
-    ///   <li>FieldBits: a byte containing field options.
-    ///       <ul>
-    ///         <li>The low-order bit is one for indexed fields, and zero for non-indexed
-    ///             fields.</li>
-    ///         <li>The second lowest-order bit is one for fields that have term vectors
-    ///             stored, and zero for fields without term vectors.</li>
-    ///         <li>If the third lowest-order bit is set (0x4), offsets are stored into
-    ///             the postings list in addition to positions.</li>
-    ///         <li>Fourth bit is unused.</li>
-    ///         <li>If the fifth lowest-order bit is set (0x10), norms are omitted for the
-    ///             indexed field.</li>
-    ///         <li>If the sixth lowest-order bit is set (0x20), payloads are stored for the
-    ///             indexed field.</li>
-    ///         <li>If the seventh lowest-order bit is set (0x40), term frequencies and
-    ///             positions are omitted for the indexed field.</li>
-    ///         <li>If the eighth lowest-order bit is set (0x80), positions are omitted for the
-    ///             indexed field.</li>
-    ///       </ul>
-    ///    </li>
-    ///    <li>DocValuesBits: a byte containing per-document value types. The type
-    ///        is recorded as two four-bit integers, with the high-order bits representing
-    ///        <code>norms</code> options, and the low-order bits representing
-    ///        {@code DocValues} options. Each four-bit integer can be decoded as such:
-    ///        <ul>
-    ///          <li>0: no DocValues for this field.</li>
-    ///          <li>1: NumericDocValues. (<seealso cref="DocValuesType#NUMERIC"/>)</li>
-    ///          <li>2: BinaryDocValues. ({@code DocValuesType#BINARY})</li>
-    ///          <li>3: SortedDocValues. ({@code DocValuesType#SORTED})</li>
-    ///        </ul>
-    ///    </li>
-    ///    <li>Attributes: a key-value map of codec-private attributes.</li>
-    /// </ul>
-    ///
-    /// @lucene.experimental </summary>
-    /// @deprecated Only for reading old 4.2-4.5 segments
-    [Obsolete("Only for reading old 4.2-4.5 segments")]
-    public class Lucene42FieldInfosFormat : FieldInfosFormat
-    {
-        private readonly FieldInfosReader reader = new Lucene42FieldInfosReader();
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene42FieldInfosFormat()
-        {
-        }
-
-        public override FieldInfosReader FieldInfosReader
-        {
-            get
-            {
-                return reader;
-            }
-        }
-
-        public override FieldInfosWriter FieldInfosWriter
-        {
-            get
-            {
-                throw new System.NotSupportedException("this codec can only be used for reading");
-            }
-        }
-
-        /// <summary>
-        /// Extension of field infos </summary>
-        internal const string EXTENSION = "fnm";
-
-        // Codec header
-        internal const string CODEC_NAME = "Lucene42FieldInfos";
-
-        internal const int FORMAT_START = 0;
-        internal const int FORMAT_CURRENT = FORMAT_START;
-
-        // Field flags
-        internal const sbyte IS_INDEXED = 0x1;
-
-        internal const sbyte STORE_TERMVECTOR = 0x2;
-        internal const sbyte STORE_OFFSETS_IN_POSTINGS = 0x4;
-        internal const sbyte OMIT_NORMS = 0x10;
-        internal const sbyte STORE_PAYLOADS = 0x20;
-        internal const sbyte OMIT_TERM_FREQ_AND_POSITIONS = 0x40;
-        internal const sbyte OMIT_POSITIONS = -128;
-    }
-}
\ No newline at end of file
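
The DocValuesBits byte documented above packs two four-bit type codes into one byte. A small worked example of the unpacking (the value 0x12 is made up for illustration; the shifts mirror Lucene42FieldInfosReader below):

    // Illustrative only: unpacking an example DocValuesBits byte.
    sbyte val = 0x12;                                  // made-up example value
    int docValuesCode = val & 0x0F;                    // low nibble:  2 -> BinaryDocValues for the field
    int normsCode = ((int)((uint)val >> 4)) & 0x0F;    // high nibble: 1 -> NumericDocValues for norms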

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosReader.cs b/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosReader.cs
deleted file mode 100644
index b81c62d..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42FieldInfosReader.cs
+++ /dev/null
@@ -1,151 +0,0 @@
-using Lucene.Net.Support;
-using System;
-using System.Collections.Generic;
-
-namespace Lucene.Net.Codecs.Lucene42
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
-    using Directory = Lucene.Net.Store.Directory;
-    using DocValuesType = Lucene.Net.Index.DocValuesType;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FieldInfos = Lucene.Net.Index.FieldInfos;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexInput = Lucene.Net.Store.IndexInput;
-    using IndexOptions = Lucene.Net.Index.IndexOptions;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
-    /// <summary>
-    /// Lucene 4.2 FieldInfos reader.
-    ///
-    /// @lucene.experimental </summary>
-    /// @deprecated Only for reading old 4.2-4.5 segments
-    /// <seealso cref= Lucene42FieldInfosFormat </seealso>
-    [Obsolete("Only for reading old 4.2-4.5 segments")]
-    internal sealed class Lucene42FieldInfosReader : FieldInfosReader
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene42FieldInfosReader()
-        {
-        }
-
-        public override FieldInfos Read(Directory directory, string segmentName, string segmentSuffix, IOContext iocontext)
-        {
-            string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
-            IndexInput input = directory.OpenInput(fileName, iocontext);
-
-            bool success = false;
-            try
-            {
-                CodecUtil.CheckHeader(input, Lucene42FieldInfosFormat.CODEC_NAME, 
-                                            Lucene42FieldInfosFormat.FORMAT_START, 
-                                            Lucene42FieldInfosFormat.FORMAT_CURRENT);
-
-                int size = input.ReadVInt32(); //read in the size
-                FieldInfo[] infos = new FieldInfo[size];
-
-                for (int i = 0; i < size; i++)
-                {
-                    string name = input.ReadString();
-                    int fieldNumber = input.ReadVInt32();
-                    sbyte bits = (sbyte)input.ReadByte();
-                    bool isIndexed = (bits & Lucene42FieldInfosFormat.IS_INDEXED) != 0;
-                    bool storeTermVector = (bits & Lucene42FieldInfosFormat.STORE_TERMVECTOR) != 0;
-                    bool omitNorms = (bits & Lucene42FieldInfosFormat.OMIT_NORMS) != 0;
-                    bool storePayloads = (bits & Lucene42FieldInfosFormat.STORE_PAYLOADS) != 0;
-                    IndexOptions indexOptions;
-                    if (!isIndexed)
-                    {
-                        indexOptions = IndexOptions.NONE;
-                    }
-                    else if ((bits & Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS) != 0)
-                    {
-                        indexOptions = IndexOptions.DOCS_ONLY;
-                    }
-                    else if ((bits & Lucene42FieldInfosFormat.OMIT_POSITIONS) != 0)
-                    {
-                        indexOptions = IndexOptions.DOCS_AND_FREQS;
-                    }
-                    else if ((bits & Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS) != 0)
-                    {
-                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
-                    }
-                    else
-                    {
-                        indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
-                    }
-
-                    // DV Types are packed in one byte
-                    sbyte val = (sbyte)input.ReadByte();
-                    DocValuesType docValuesType = GetDocValuesType(input, (sbyte)(val & 0x0F));
-                    DocValuesType normsType = GetDocValuesType(input, (sbyte)(((int)((uint)val >> 4)) & 0x0F));
-                    IDictionary<string, string> attributes = input.ReadStringStringMap();
-                    infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, 
-                        omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.UnmodifiableMap(attributes));
-                }
-
-                CodecUtil.CheckEOF(input);
-                FieldInfos fieldInfos = new FieldInfos(infos);
-                success = true;
-                return fieldInfos;
-            }
-            finally
-            {
-                if (success)
-                {
-                    input.Dispose();
-                }
-                else
-                {
-                    IOUtils.CloseWhileHandlingException(input);
-                }
-            }
-        }
-
-        private static DocValuesType GetDocValuesType(IndexInput input, sbyte b)
-        {
-            if (b == 0)
-            {
-                return DocValuesType.NONE;
-            }
-            else if (b == 1)
-            {
-                return DocValuesType.NUMERIC;
-            }
-            else if (b == 2)
-            {
-                return DocValuesType.BINARY;
-            }
-            else if (b == 3)
-            {
-                return DocValuesType.SORTED;
-            }
-            else if (b == 4)
-            {
-                return DocValuesType.SORTED_SET;
-            }
-            else
-            {
-                throw new CorruptIndexException("invalid docvalues byte: " + b + " (resource=" + input + ")");
-            }
-        }
-    }
-}
\ No newline at end of file
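
A hedged usage sketch of the reader above, assuming an already-open Lucene.Net.Store.Directory and that IOContext.READ_ONCE is available as in the 4.8 port; the segment name "_0" and the helper are placeholders, not part of the codec:

    // Hypothetical helper: load the .fnm field infos of segment "_0" through the obsolete 4.2 format.
    internal static Lucene.Net.Index.FieldInfos ReadSegmentFieldInfos(Lucene.Net.Store.Directory directory)
    {
        FieldInfosFormat format = new Lucene42FieldInfosFormat();
        return format.FieldInfosReader.Read(directory, "_0", "", Lucene.Net.Store.IOContext.READ_ONCE);
    }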

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsConsumer.cs b/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsConsumer.cs
deleted file mode 100644
index fa445de..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsConsumer.cs
+++ /dev/null
@@ -1,248 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Linq;
-
-namespace Lucene.Net.Codecs.Lucene42
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using BlockPackedWriter = Lucene.Net.Util.Packed.BlockPackedWriter;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using FieldInfo = Lucene.Net.Index.FieldInfo;
-    using FormatAndBits = Lucene.Net.Util.Packed.PackedInt32s.FormatAndBits;
-    using IndexFileNames = Lucene.Net.Index.IndexFileNames;
-    using IndexOutput = Lucene.Net.Store.IndexOutput;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using MathUtil = Lucene.Net.Util.MathUtil;
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Writer for <seealso cref="Lucene42NormsFormat"/>
-    /// </summary>
-    internal class Lucene42NormsConsumer : DocValuesConsumer
-    {
-        internal const sbyte NUMBER = 0;
-
-        internal const int BLOCK_SIZE = 4096;
-
-        internal const sbyte DELTA_COMPRESSED = 0;
-        internal const sbyte TABLE_COMPRESSED = 1;
-        internal const sbyte UNCOMPRESSED = 2;
-        internal const sbyte GCD_COMPRESSED = 3;
-
-        internal IndexOutput data, meta;
-        internal readonly int maxDoc;
-        internal readonly float acceptableOverheadRatio;
-
-        internal Lucene42NormsConsumer(SegmentWriteState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension, float acceptableOverheadRatio)
-        {
-            this.acceptableOverheadRatio = acceptableOverheadRatio;
-            maxDoc = state.SegmentInfo.DocCount;
-            bool success = false;
-            try
-            {
-                string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
-                data = state.Directory.CreateOutput(dataName, state.Context);
-                CodecUtil.WriteHeader(data, dataCodec, Lucene42DocValuesProducer.VERSION_CURRENT);
-                string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
-                meta = state.Directory.CreateOutput(metaName, state.Context);
-                CodecUtil.WriteHeader(meta, metaCodec, Lucene42DocValuesProducer.VERSION_CURRENT);
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    IOUtils.CloseWhileHandlingException(this);
-                }
-            }
-        }
-
-        public override void AddNumericField(FieldInfo field, IEnumerable<long?> values)
-        {
-            meta.WriteVInt32(field.Number);
-            meta.WriteByte((byte)NUMBER);
-            meta.WriteInt64(data.GetFilePointer());
-            long minValue = long.MaxValue;
-            long maxValue = long.MinValue;
-            long gcd = 0;
-            // TODO: more efficient?
-            HashSet<long> uniqueValues = null;
-            if (true)
-            {
-                uniqueValues = new HashSet<long>();
-
-                long count = 0;
-                foreach (long? nv in values)
-                {
-                    Debug.Assert(nv != null);
-                    long v = nv.Value;
-
-                    if (gcd != 1)
-                    {
-                        if (v < long.MinValue / 2 || v > long.MaxValue / 2)
-                        {
-                            // in that case v - minValue might overflow and make the GCD computation return
-                            // wrong results. Since these extreme values are unlikely, we just discard
-                            // GCD computation for them
-                            gcd = 1;
-                        } // minValue needs to be set first
-                        else if (count != 0)
-                        {
-                            gcd = MathUtil.Gcd(gcd, v - minValue);
-                        }
-                    }
-
-                    minValue = Math.Min(minValue, v);
-                    maxValue = Math.Max(maxValue, v);
-
-                    if (uniqueValues != null)
-                    {
-                        if (uniqueValues.Add(v))
-                        {
-                            if (uniqueValues.Count > 256)
-                            {
-                                uniqueValues = null;
-                            }
-                        }
-                    }
-
-                    ++count;
-                }
-                Debug.Assert(count == maxDoc);
-            }
-
-            if (uniqueValues != null)
-            {
-                // small number of unique values
-                int bitsPerValue = PackedInt32s.BitsRequired(uniqueValues.Count - 1);
-                FormatAndBits formatAndBits = PackedInt32s.FastestFormatAndBits(maxDoc, bitsPerValue, acceptableOverheadRatio);
-                if (formatAndBits.BitsPerValue == 8 && minValue >= sbyte.MinValue && maxValue <= sbyte.MaxValue)
-                {
-                    meta.WriteByte((byte)UNCOMPRESSED); // uncompressed
-                    foreach (long? nv in values)
-                    {
-                        data.WriteByte((byte)nv.GetValueOrDefault());
-                    }
-                }
-                else
-                {
-                    meta.WriteByte((byte)TABLE_COMPRESSED); // table-compressed
-                    var decode = uniqueValues.ToArray();
-                    var encode = new Dictionary<long, int>();
-                    data.WriteVInt32(decode.Length);
-                    for (int i = 0; i < decode.Length; i++)
-                    {
-                        data.WriteInt64(decode[i]);
-                        encode[decode[i]] = i;
-                    }
-
-                    meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
-                    data.WriteVInt32(formatAndBits.Format.Id);
-                    data.WriteVInt32(formatAndBits.BitsPerValue);
-
-                    PackedInt32s.Writer writer = PackedInt32s.GetWriterNoHeader(data, formatAndBits.Format, maxDoc, formatAndBits.BitsPerValue, PackedInt32s.DEFAULT_BUFFER_SIZE);
-                    foreach (long? nv in values)
-                    {
-                        writer.Add(encode[nv.GetValueOrDefault()]);
-                    }
-                    writer.Finish();
-                }
-            }
-            else if (gcd != 0 && gcd != 1)
-            {
-                meta.WriteByte((byte)GCD_COMPRESSED);
-                meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
-                data.WriteInt64(minValue);
-                data.WriteInt64(gcd);
-                data.WriteVInt32(BLOCK_SIZE);
-
-                var writer = new BlockPackedWriter(data, BLOCK_SIZE);
-                foreach (long? nv in values)
-                {
-                    writer.Add((nv.GetValueOrDefault() - minValue) / gcd);
-                }
-                writer.Finish();
-            }
-            else
-            {
-                meta.WriteByte((byte)DELTA_COMPRESSED); // delta-compressed
-
-                meta.WriteVInt32(PackedInt32s.VERSION_CURRENT);
-                data.WriteVInt32(BLOCK_SIZE);
-
-                var writer = new BlockPackedWriter(data, BLOCK_SIZE);
-                foreach (long? nv in values)
-                {
-                    writer.Add(nv.GetValueOrDefault());
-                }
-                writer.Finish();
-            }
-        }
-
-        protected override void Dispose(bool disposing)
-        {
-            if (disposing)
-            {
-                bool success = false;
-                try
-                {
-                    if (meta != null)
-                    {
-                        meta.WriteVInt32(-1); // write EOF marker
-                        CodecUtil.WriteFooter(meta); // write checksum
-                    }
-                    if (data != null)
-                    {
-                        CodecUtil.WriteFooter(data); // write checksum
-                    }
-                    success = true;
-                }
-                finally
-                {
-                    if (success)
-                    {
-                        IOUtils.Close(data, meta);
-                    }
-                    else
-                    {
-                        IOUtils.CloseWhileHandlingException(data, meta);
-                    }
-                    meta = data = null;
-                }
-            }
-        }
-
-        public override void AddBinaryField(FieldInfo field, IEnumerable<BytesRef> values)
-        {
-            throw new NotSupportedException();
-        }
-
-        public override void AddSortedField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrd)
-        {
-            throw new NotSupportedException();
-        }
-
-        public override void AddSortedSetField(FieldInfo field, IEnumerable<BytesRef> values, IEnumerable<long?> docToOrdCount, IEnumerable<long?> ords)
-        {
-            throw new NotSupportedException();
-        }
-    }
-}
\ No newline at end of file

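For context on the numeric consumer removed above: per field it picks one of four encodings — raw bytes when every value fits in a signed byte, a lookup table plus packed ordinals when there are few distinct values, GCD compression when all values share a common divisor, and plain block-packed deltas otherwise. A minimal C# sketch of that decision order (illustrative names only, not part of the Lucene.Net API):

namespace Example
{
    internal enum NumericEncoding
    {
        Uncompressed,     // one raw byte per document
        TableCompressed,  // table of distinct values + packed ordinals
        GcdCompressed,    // (value - min) / gcd, block-packed
        DeltaCompressed   // values written directly, block-packed
    }

    internal static class NumericEncodingChooser
    {
        // hasSmallValueTable corresponds to "uniqueValues != null" in the code above:
        // the writer only keeps tracking distinct values while their count stays small.
        internal static NumericEncoding Choose(
            bool hasSmallValueTable, int bitsPerValue,
            long minValue, long maxValue, long gcd)
        {
            if (hasSmallValueTable)
            {
                bool fitsInByte = bitsPerValue == 8
                    && minValue >= sbyte.MinValue
                    && maxValue <= sbyte.MaxValue;
                return fitsInByte ? NumericEncoding.Uncompressed : NumericEncoding.TableCompressed;
            }
            if (gcd != 0 && gcd != 1)
            {
                return NumericEncoding.GcdCompressed;
            }
            return NumericEncoding.DeltaCompressed;
        }
    }
}
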
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsFormat.cs b/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsFormat.cs
deleted file mode 100644
index 66e0c3c..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42NormsFormat.cs
+++ /dev/null
@@ -1,79 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene42
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
-    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
-    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
-
-    /// <summary>
-    /// Lucene 4.2 score normalization format.
-    /// <p>
-    /// NOTE: this uses the same format as <seealso cref="Lucene42DocValuesFormat"/>
-    /// Numeric DocValues, but with different file extensions, and passing
-    /// <seealso cref="PackedInt32s#FASTEST"/> for uncompressed encoding: trading off
-    /// space for performance.
-    /// <p>
-    /// Files:
-    /// <ul>
-    ///   <li><tt>.nvd</tt>: DocValues data</li>
-    ///   <li><tt>.nvm</tt>: DocValues metadata</li>
-    /// </ul> </summary>
-    /// <seealso cref= Lucene42DocValuesFormat </seealso>
-    public class Lucene42NormsFormat : NormsFormat
-    {
-        internal readonly float acceptableOverheadRatio;
-
-        /// <summary>
-        /// Calls <seealso cref="Lucene42NormsFormat(float)"/> passing
-        /// <seealso cref="PackedInt32s#FASTEST"/>.
-        /// </summary>
-        public Lucene42NormsFormat()
-            : this(PackedInt32s.FASTEST)
-        {
-            // note: we choose FASTEST here (otherwise our norms are half as big but 15% slower than previous lucene)
-        }
-
-        /// <summary>
-        /// Creates a new Lucene42NormsFormat with the specified
-        /// <code>acceptableOverheadRatio</code> for NumericDocValues. </summary>
-        /// <param name="acceptableOverheadRatio"> compression parameter for numerics.
-        ///        Currently this is only used when the number of unique values is small.
-        ///
-        /// @lucene.experimental </param>
-        public Lucene42NormsFormat(float acceptableOverheadRatio)
-        {
-            this.acceptableOverheadRatio = acceptableOverheadRatio;
-        }
-
-        public override DocValuesConsumer NormsConsumer(SegmentWriteState state)
-        {
-            return new Lucene42NormsConsumer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION, acceptableOverheadRatio);
-        }
-
-        public override DocValuesProducer NormsProducer(SegmentReadState state)
-        {
-            return new Lucene42DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, METADATA_CODEC, METADATA_EXTENSION);
-        }
-
-        private const string DATA_CODEC = "Lucene41NormsData";
-        private const string DATA_EXTENSION = "nvd";
-        private const string METADATA_CODEC = "Lucene41NormsMetadata";
-        private const string METADATA_EXTENSION = "nvm";
-    }
-}
\ No newline at end of file

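The only tuning knob on the norms format removed above is acceptableOverheadRatio, which is forwarded to the numeric writer and only matters when the number of distinct norm values is small. A hedged usage sketch — the class and field names are illustrative, and it assumes the PackedInt32s.COMPACT and PackedInt32s.FASTEST constants referenced elsewhere in this codebase:

using Lucene.Net.Codecs.Lucene42;
using Lucene.Net.Util.Packed;

namespace Example
{
    internal static class NormsFormatChoice
    {
        // Default: FASTEST, i.e. larger .nvd files but cheaper decoding.
        internal static readonly Lucene42NormsFormat SpeedOptimized = new Lucene42NormsFormat();

        // Alternative: COMPACT trades decode speed for smaller norms when
        // only a few distinct values exist (per the constructor comment above).
        internal static readonly Lucene42NormsFormat SpaceOptimized =
            new Lucene42NormsFormat(PackedInt32s.COMPACT);
    }
}
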
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42TermVectorsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42TermVectorsFormat.cs b/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42TermVectorsFormat.cs
deleted file mode 100644
index 27e491e..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene42/Lucene42TermVectorsFormat.cs
+++ /dev/null
@@ -1,127 +0,0 @@
-namespace Lucene.Net.Codecs.Lucene42
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using CompressingTermVectorsFormat = Lucene.Net.Codecs.Compressing.CompressingTermVectorsFormat;
-    using CompressionMode = Lucene.Net.Codecs.Compressing.CompressionMode;
-
-    /// <summary>
-    /// Lucene 4.2 <seealso cref="TermVectorsFormat term vectors format"/>.
-    /// <p>
-    /// Very similar to <seealso cref="Lucene41StoredFieldsFormat"/>, this format is based
-    /// on compressed chunks of data, with document-level granularity so that a
-    /// document can never span across distinct chunks. Moreover, data is made as
-    /// compact as possible:<ul>
-    /// <li>textual data is compressed using the very light,
-    /// <a href="http://code.google.com/p/lz4/">LZ4</a> compression algorithm,
-    /// <li>binary data is written using fixed-size blocks of
-    /// <seealso cref="PackedInts packed ints"/>.
-    /// </ul>
-    /// <p>
-    /// Term vectors are stored using two files<ul>
-    /// <li>a data file where terms, frequencies, positions, offsets and payloads
-    /// are stored,
-    /// <li>an index file, loaded into memory, used to locate specific documents in
-    /// the data file.
-    /// </ul>
-    /// Looking up term vectors for any document requires at most 1 disk seek.
-    /// <p><b>File formats</b>
-    /// <ol>
-    /// <li><a name="vector_data" id="vector_data"></a>
-    /// <p>A vector data file (extension <tt>.tvd</tt>). This file stores terms,
-    /// frequencies, positions, offsets and payloads for every document. Upon writing
-    /// a new segment, it accumulates data into memory until the buffer used to store
-    /// terms and payloads grows beyond 4KB. Then it flushes all metadata, terms
-    /// and positions to disk using <a href="http://code.google.com/p/lz4/">LZ4</a>
-    /// compression for terms and payloads and
-    /// <seealso cref="BlockPackedWriter blocks of packed ints"/> for positions.</p>
-    /// <p>Here is a more detailed description of the field data file format:</p>
-    /// <ul>
-    /// <li>VectorData (.tvd) --&gt; &lt;Header&gt;, PackedIntsVersion, ChunkSize, &lt;Chunk&gt;<sup>ChunkCount</sup>, Footer</li>
-    /// <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    /// <li>PackedIntsVersion --&gt; <seealso cref="PackedInts#VERSION_CURRENT"/> as a <seealso cref="DataOutput#writeVInt VInt"/></li>
-    /// <li>ChunkSize is the number of bytes of terms to accumulate before flushing, as a <seealso cref="DataOutput#writeVInt VInt"/></li>
-    /// <li>ChunkCount is not known in advance and is the number of chunks necessary to store all documents of the segment</li>
-    /// <li>Chunk --&gt; DocBase, ChunkDocs, &lt; NumFields &gt;, &lt; FieldNums &gt;, &lt; FieldNumOffs &gt;, &lt; Flags &gt;,
-    /// &lt; NumTerms &gt;, &lt; TermLengths &gt;, &lt; TermFreqs &gt;, &lt; Positions &gt;, &lt; StartOffsets &gt;, &lt; Lengths &gt;,
-    /// &lt; PayloadLengths &gt;, &lt; TermAndPayloads &gt;</li>
-    /// <li>DocBase is the ID of the first doc of the chunk as a <seealso cref="DataOutput#writeVInt VInt"/></li>
-    /// <li>ChunkDocs is the number of documents in the chunk</li>
-    /// <li>NumFields --&gt; DocNumFields<sup>ChunkDocs</sup></li>
-    /// <li>DocNumFields is the number of fields for each doc, written as a <seealso cref="DataOutput#writeVInt VInt"/> if ChunkDocs==1 and as a <seealso cref="PackedInts"/> array otherwise</li>
-    /// <li>FieldNums --&gt; FieldNumDelta<sup>TotalDistinctFields</sup>, a delta-encoded list of the sorted unique field numbers present in the chunk</li>
-    /// <li>FieldNumOffs --&gt; FieldNumOff<sup>TotalFields</sup>, as a <seealso cref="PackedInts"/> array</li>
-    /// <li>FieldNumOff is the offset of the field number in FieldNums</li>
-    /// <li>TotalFields is the total number of fields (sum of the values of NumFields)</li>
-    /// <li>Flags --&gt; Bit &lt; FieldFlags &gt;</li>
-    /// <li>Bit is a single bit which, when true, means that fields have the same options for every document in the chunk</li>
-    /// <li>FieldFlags --&gt; if Bit==1: Flag<sup>TotalDistinctFields</sup> else Flag<sup>TotalFields</sup></li>
-    /// <li>Flag: a 3-bit int where:<ul>
-    /// <li>the first bit means that the field has positions</li>
-    /// <li>the second bit means that the field has offsets</li>
-    /// <li>the third bit means that the field has payloads</li>
-    /// </ul></li>
-    /// <li>NumTerms --&gt; FieldNumTerms<sup>TotalFields</sup></li>
-    /// <li>FieldNumTerms: the number of terms for each field, using <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>TermLengths --&gt; PrefixLength<sup>TotalTerms</sup> SuffixLength<sup>TotalTerms</sup></li>
-    /// <li>TotalTerms: total number of terms (sum of NumTerms)</li>
-    /// <li>PrefixLength: 0 for the first term of a field, the common prefix with the previous term otherwise using <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>SuffixLength: length of the term minus PrefixLength for every term using <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>TermFreqs --&gt; TermFreqMinus1<sup>TotalTerms</sup></li>
-    /// <li>TermFreqMinus1: (frequency - 1) for each term using  <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>Positions --&gt; PositionDelta<sup>TotalPositions</sup></li>
-    /// <li>TotalPositions is the sum of frequencies of terms of all fields that have positions</li>
-    /// <li>PositionDelta: the absolute position for the first position of a term, and the difference with the previous positions for following positions using <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>StartOffsets --&gt; (AvgCharsPerTerm<sup>TotalDistinctFields</sup>) StartOffsetDelta<sup>TotalOffsets</sup></li>
-    /// <li>TotalOffsets is the sum of frequencies of terms of all fields that have offsets</li>
-    /// <li>AvgCharsPerTerm: average number of chars per term, encoded as a 4-byte float. Not present if no field has both positions and offsets enabled.</li>
-    /// <li>StartOffsetDelta: (startOffset - previousStartOffset - AvgCharsPerTerm * PositionDelta). previousStartOffset is 0 for the first offset and AvgCharsPerTerm is 0 if the field has no positions using  <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>Lengths --&gt; LengthMinusTermLength<sup>TotalOffsets</sup></li>
-    /// <li>LengthMinusTermLength: (endOffset - startOffset - termLength) using  <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>PayloadLengths --&gt; PayloadLength<sup>TotalPayloads</sup></li>
-    /// <li>TotalPayloads is the sum of frequencies of terms of all fields that have payloads</li>
-    /// <li>PayloadLength is the payload length encoded using  <seealso cref="BlockPackedWriter blocks of 64 packed ints"/></li>
-    /// <li>TermAndPayloads --&gt; LZ4-compressed representation of &lt; FieldTermsAndPayLoads &gt;<sup>TotalFields</sup></li>
-    /// <li>FieldTermsAndPayLoads --&gt; Terms (Payloads)</li>
-    /// <li>Terms: term bytes</li>
-    /// <li>Payloads: payload bytes (if the field has payloads)</li>
-    /// <li>Footer --&gt; <seealso cref="CodecUtil#writeFooter CodecFooter"/></li>
-    /// </ul>
-    /// </li>
-    /// <li><a name="vector_index" id="vector_index"></a>
-    /// <p>An index file (extension <tt>.tvx</tt>).</p>
-    /// <ul>
-    /// <li>VectorIndex (.tvx) --&gt; &lt;Header&gt;, &lt;ChunkIndex&gt;, Footer</li>
-    /// <li>Header --&gt; <seealso cref="CodecUtil#writeHeader CodecHeader"/></li>
-    /// <li>ChunkIndex: See <seealso cref="CompressingStoredFieldsIndexWriter"/></li>
-    /// <li>Footer --&gt; <seealso cref="CodecUtil#writeFooter CodecFooter"/></li>
-    /// </ul>
-    /// </li>
-    /// </ol>
-    /// @lucene.experimental
-    /// </summary>
-    public sealed class Lucene42TermVectorsFormat : CompressingTermVectorsFormat
-    {
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene42TermVectorsFormat()
-            : base("Lucene41StoredFields", "", CompressionMode.FAST, 1 << 12)
-        {
-        }
-    }
-}
\ No newline at end of file

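As the deleted file shows, Lucene42TermVectorsFormat is simply CompressingTermVectorsFormat configured with LZ4 (CompressionMode.FAST) and 4 KB chunks (1 << 12). A hedged sketch of a non-standard variant that keeps the same chunked layout but flushes 16 KB chunks; the class and format names are hypothetical, and the result is not readable as a stock Lucene format:

using Lucene.Net.Codecs.Compressing;

namespace Example
{
    // Hypothetical variant: same chunked, LZ4-compressed layout described above,
    // but accumulating 16 KB of terms/payloads per chunk instead of 4 KB.
    public sealed class LargeChunkTermVectorsFormat : CompressingTermVectorsFormat
    {
        public LargeChunkTermVectorsFormat()
            : base("MyTermVectorsData", "", CompressionMode.FAST, 1 << 14)
        {
        }
    }
}
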
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45Codec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45Codec.cs b/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45Codec.cs
deleted file mode 100644
index 8214bd6..0000000
--- a/src/Lucene.Net.Core/Codecs/Lucene45/Lucene45Codec.cs
+++ /dev/null
@@ -1,164 +0,0 @@
-using System;
-
-namespace Lucene.Net.Codecs.Lucene45
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Lucene40LiveDocsFormat = Lucene.Net.Codecs.Lucene40.Lucene40LiveDocsFormat;
-    using Lucene40SegmentInfoFormat = Lucene.Net.Codecs.Lucene40.Lucene40SegmentInfoFormat;
-    using Lucene41StoredFieldsFormat = Lucene.Net.Codecs.Lucene41.Lucene41StoredFieldsFormat;
-    using Lucene42FieldInfosFormat = Lucene.Net.Codecs.Lucene42.Lucene42FieldInfosFormat;
-    using Lucene42NormsFormat = Lucene.Net.Codecs.Lucene42.Lucene42NormsFormat;
-    using Lucene42TermVectorsFormat = Lucene.Net.Codecs.Lucene42.Lucene42TermVectorsFormat;
-    using PerFieldDocValuesFormat = Lucene.Net.Codecs.PerField.PerFieldDocValuesFormat;
-    using PerFieldPostingsFormat = Lucene.Net.Codecs.PerField.PerFieldPostingsFormat;
-
-    /// <summary>
-    /// Implements the Lucene 4.5 index format, with configurable per-field postings
-    /// and docvalues formats.
-    /// <p>
-    /// If you want to reuse functionality of this codec in another codec, extend
-    /// <seealso cref="FilterCodec"/>.
-    /// </summary>
-    /// <seealso cref= Lucene.Net.Codecs.Lucene45 package documentation for file format details.
-    /// @lucene.experimental </seealso>
-    /// @deprecated Only for reading old 4.3-4.5 segments
-    // NOTE: if we make largish changes in a minor release, easier to just make Lucene46Codec or whatever
-    // if they are backwards compatible or smallish we can probably do the backwards in the postingsreader
-    // (it writes a minor version, etc).
-    [Obsolete("Only for reading old 4.3-4.5 segments")]
-    [CodecName("Lucene45")] // LUCENENET specific - using CodecName attribute to ensure the default name passed from subclasses is the same as this class name
-    public class Lucene45Codec : Codec
-    {
-        private readonly StoredFieldsFormat fieldsFormat = new Lucene41StoredFieldsFormat();
-        private readonly TermVectorsFormat vectorsFormat = new Lucene42TermVectorsFormat();
-        private readonly FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormat();
-        private readonly SegmentInfoFormat infosFormat = new Lucene40SegmentInfoFormat();
-        private readonly LiveDocsFormat liveDocsFormat = new Lucene40LiveDocsFormat();
-
-        private readonly PostingsFormat postingsFormat;
-
-        private class PerFieldPostingsFormatAnonymousInnerClassHelper : PerFieldPostingsFormat
-        {
-            private readonly Lucene45Codec outerInstance;
-
-            public PerFieldPostingsFormatAnonymousInnerClassHelper(Lucene45Codec outerInstance)
-            {
-                this.outerInstance = outerInstance;
-            }
-
-            public override PostingsFormat GetPostingsFormatForField(string field)
-            {
-                return outerInstance.GetPostingsFormatForField(field);
-            }
-        }
-
-        private readonly DocValuesFormat docValuesFormat;
-
-        private class PerFieldDocValuesFormatAnonymousInnerClassHelper : PerFieldDocValuesFormat
-        {
-            private readonly Lucene45Codec outerInstance;
-
-            public PerFieldDocValuesFormatAnonymousInnerClassHelper(Lucene45Codec outerInstance)
-            {
-                this.outerInstance = outerInstance;
-            }
-
-            public override DocValuesFormat GetDocValuesFormatForField(string field)
-            {
-                return outerInstance.GetDocValuesFormatForField(field);
-            }
-        }
-
-        /// <summary>
-        /// Sole constructor. </summary>
-        public Lucene45Codec()
-            : base()
-        {
-            postingsFormat = new PerFieldPostingsFormatAnonymousInnerClassHelper(this);
-            docValuesFormat = new PerFieldDocValuesFormatAnonymousInnerClassHelper(this);
-        }
-
-        public override sealed StoredFieldsFormat StoredFieldsFormat
-        {
-            get { return fieldsFormat; }
-        }
-
-        public override sealed TermVectorsFormat TermVectorsFormat
-        {
-            get { return vectorsFormat; }
-        }
-
-        public override sealed PostingsFormat PostingsFormat
-        {
-            get { return postingsFormat; }
-        }
-
-        public override FieldInfosFormat FieldInfosFormat
-        {
-            get { return fieldInfosFormat; }
-        }
-
-        public override SegmentInfoFormat SegmentInfoFormat
-        {
-            get { return infosFormat; }
-        }
-
-        public override sealed LiveDocsFormat LiveDocsFormat
-        {
-            get { return liveDocsFormat; }
-        }
-
-        /// <summary>
-        /// Returns the postings format that should be used for writing
-        ///  new segments of <code>field</code>.
-        ///
-        ///  The default implementation always returns "Lucene41"
-        /// </summary>
-        public virtual PostingsFormat GetPostingsFormatForField(string field)
-        {
-            return defaultFormat;
-        }
-
-        /// <summary>
-        /// Returns the docvalues format that should be used for writing
-        ///  new segments of <code>field</code>.
-        ///
-        ///  The default implementation always returns "Lucene45"
-        /// </summary>
-        public virtual DocValuesFormat GetDocValuesFormatForField(string field)
-        {
-            return defaultDVFormat;
-        }
-
-        public override sealed DocValuesFormat DocValuesFormat
-        {
-            get { return docValuesFormat; }
-        }
-
-        private readonly PostingsFormat defaultFormat = Codecs.PostingsFormat.ForName("Lucene41");
-        private readonly DocValuesFormat defaultDVFormat = Codecs.DocValuesFormat.ForName("Lucene45");
-
-        private readonly NormsFormat normsFormat = new Lucene42NormsFormat();
-
-        public override sealed NormsFormat NormsFormat
-        {
-            get { return normsFormat; }
-        }
-    }
-}
\ No newline at end of file

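The virtual GetPostingsFormatForField / GetDocValuesFormatForField hooks in the codec removed above are the intended per-field extension points. A hedged sketch of a subclass that reroutes a single field; the class name and the "id" field are illustrative, and it assumes a postings format registered under the name "Memory" is available (otherwise PostingsFormat.ForName throws):

using Lucene.Net.Codecs;
using Lucene.Net.Codecs.Lucene45;

namespace Example
{
#pragma warning disable 612, 618 // Lucene45Codec is marked obsolete (reading old 4.3-4.5 segments)
    public class IdInMemoryCodec : Lucene45Codec
    {
        // Assumes a postings format named "Memory" is registered in the running
        // application; every other field keeps the "Lucene41" default.
        private readonly PostingsFormat memoryPostings = PostingsFormat.ForName("Memory");

        public override PostingsFormat GetPostingsFormatForField(string field)
        {
            return field == "id" ? memoryPostings : base.GetPostingsFormatForField(field);
        }
    }
#pragma warning restore 612, 618
}
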
