lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [07/62] [abbrv] [partial] lucenenet git commit: Renamed Lucene.Net.Core folder Lucene.Net because the dotnet.exe pack command doesn't allow creating a NuGet package with a different name than its folder. Working around it with the script was much more co
Date Tue, 04 Apr 2017 17:19:13 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Index/ReadersAndUpdates.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/ReadersAndUpdates.cs b/src/Lucene.Net.Core/Index/ReadersAndUpdates.cs
deleted file mode 100644
index b3d520c..0000000
--- a/src/Lucene.Net.Core/Index/ReadersAndUpdates.cs
+++ /dev/null
@@ -1,964 +0,0 @@
-using Lucene.Net.Documents;
-using Lucene.Net.Support;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Globalization;
-using System.Text;
-
-namespace Lucene.Net.Index
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using BinaryDocValuesField = BinaryDocValuesField;
-    using IBits = Lucene.Net.Util.IBits;
-    using BytesRef = Lucene.Net.Util.BytesRef;
-    using Codec = Lucene.Net.Codecs.Codec;
-    using Directory = Lucene.Net.Store.Directory;
-    using DocValuesConsumer = Lucene.Net.Codecs.DocValuesConsumer;
-    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using LiveDocsFormat = Lucene.Net.Codecs.LiveDocsFormat;
-    using IMutableBits = Lucene.Net.Util.IMutableBits;
-    using NumericDocValuesField = NumericDocValuesField;
-    using TrackingDirectoryWrapper = Lucene.Net.Store.TrackingDirectoryWrapper;
-
-    // Used by IndexWriter to hold open SegmentReaders (for
-    // searching or merging), plus pending deletes and updates,
-    // for a given segment
-#if FEATURE_SERIALIZABLE
-    [Serializable]
-#endif
-    internal class ReadersAndUpdates
-    {
-        // Not final because we replace (clone) when we need to
-        // change it and it's been shared:
-        public SegmentCommitInfo Info { get; private set; }
-
-        // Tracks how many consumers are using this instance:
-        private readonly AtomicInt32 refCount = new AtomicInt32(1);
-
-        private readonly IndexWriter writer;
-
-        // Set once (null, and then maybe set, and never set again):
-        private SegmentReader reader;
-
-        // TODO: it's sometimes wasteful that we hold open two
-        // separate SRs (one for merging one for
-        // reading)... maybe just use a single SR?  The gains of
-        // not loading the terms index (for merging in the
-        // non-NRT case) are far less now... and if the app has
-        // any deletes it'll open real readers anyway.
-
-        // Set once (null, and then maybe set, and never set again):
-        private SegmentReader mergeReader;
-
-        // Holds the current shared (readable and writable)
-        // liveDocs.  this is null when there are no deleted
-        // docs, and it's copy-on-write (cloned whenever we need
-        // to change it but it's been shared to an external NRT
-        // reader).
-        private IBits liveDocs;
-
-        // How many further deletions we've done against
-        // liveDocs vs when we loaded it or last wrote it:
-        private int pendingDeleteCount;
-
-        // True if the current liveDocs is referenced by an
-        // external NRT reader:
-        private bool liveDocsShared;
-
-        // Indicates whether this segment is currently being merged. While a segment
-        // is merging, all field updates are also registered in the
-        // mergingNumericUpdates map. Also, calls to writeFieldUpdates merge the
-        // updates with mergingNumericUpdates.
-        // That way, when the segment is done merging, IndexWriter can apply the
-        // updates on the merged segment too.
-        private bool isMerging = false;
-
-        private readonly IDictionary<string, DocValuesFieldUpdates> mergingDVUpdates = new Dictionary<string, DocValuesFieldUpdates>();
-
-        public ReadersAndUpdates(IndexWriter writer, SegmentCommitInfo info)
-        {
-            this.Info = info;
-            this.writer = writer;
-            liveDocsShared = true;
-        }
-
-        public virtual void IncRef()
-        {
-            int rc = refCount.IncrementAndGet();
-            Debug.Assert(rc > 1);
-        }
-
-        public virtual void DecRef()
-        {
-            int rc = refCount.DecrementAndGet();
-            Debug.Assert(rc >= 0);
-        }
-
-        public virtual int RefCount()
-        {
-            int rc = refCount.Get();
-            Debug.Assert(rc >= 0);
-            return rc;
-        }
-
-        public virtual int PendingDeleteCount
-        {
-            get
-            {
-                lock (this)
-                {
-                    return pendingDeleteCount;
-                }
-            }
-        }
-
-        // Call only from assert!
-        public virtual bool VerifyDocCounts()
-        {
-            lock (this)
-            {
-                int count;
-                if (liveDocs != null)
-                {
-                    count = 0;
-                    for (int docID = 0; docID < Info.Info.DocCount; docID++)
-                    {
-                        if (liveDocs.Get(docID))
-                        {
-                            count++;
-                        }
-                    }
-                }
-                else
-                {
-                    count = Info.Info.DocCount;
-                }
-
-                Debug.Assert(Info.Info.DocCount - Info.DelCount - pendingDeleteCount == count, "info.docCount=" + Info.Info.DocCount + " info.DelCount=" + Info.DelCount + " pendingDeleteCount=" + pendingDeleteCount + " count=" + count);
-                return true;
-            }
-        }
-
-        /// <summary>
-        /// Returns a <seealso cref="SegmentReader"/>. </summary>
-        public virtual SegmentReader GetReader(IOContext context)
-        {
-            if (reader == null)
-            {
-                // We steal returned ref:
-                reader = new SegmentReader(Info, writer.Config.ReaderTermsIndexDivisor, context);
-                if (liveDocs == null)
-                {
-                    liveDocs = reader.LiveDocs;
-                }
-            }
-
-            // Ref for caller
-            reader.IncRef();
-            return reader;
-        }
-
-        // Get reader for merging (does not load the terms
-        // index):
-        public virtual SegmentReader GetMergeReader(IOContext context)
-        {
-            lock (this)
-            {
-                //System.out.println("  livedocs=" + rld.liveDocs);
-
-                if (mergeReader == null)
-                {
-                    if (reader != null)
-                    {
-                        // Just use the already opened non-merge reader
-                        // for merging.  In the NRT case this saves us
-                        // pointless double-open:
-                        //System.out.println("PROMOTE non-merge reader seg=" + rld.info);
-                        // Ref for us:
-                        reader.IncRef();
-                        mergeReader = reader;
-                        //System.out.println(Thread.currentThread().getName() + ": getMergeReader share seg=" + info.name);
-                    }
-                    else
-                    {
-                        //System.out.println(Thread.currentThread().getName() + ": getMergeReader seg=" + info.name);
-                        // We steal returned ref:
-                        mergeReader = new SegmentReader(Info, -1, context);
-                        if (liveDocs == null)
-                        {
-                            liveDocs = mergeReader.LiveDocs;
-                        }
-                    }
-                }
-
-                // Ref for caller
-                mergeReader.IncRef();
-                return mergeReader;
-            }
-        }
-
-        public virtual void Release(SegmentReader sr)
-        {
-            lock (this)
-            {
-                Debug.Assert(Info == sr.SegmentInfo);
-                sr.DecRef();
-            }
-        }
-
-        public virtual bool Delete(int docID)
-        {
-            lock (this)
-            {
-                Debug.Assert(liveDocs != null);
-                //Debug.Assert(Thread.holdsLock(Writer));
-                Debug.Assert(docID >= 0 && docID < liveDocs.Length, "out of bounds: docid=" + docID + " liveDocsLength=" + liveDocs.Length + " seg=" + Info.Info.Name + " docCount=" + Info.Info.DocCount);
-                Debug.Assert(!liveDocsShared);
-                bool didDelete = liveDocs.Get(docID);
-                if (didDelete)
-                {
-                    ((IMutableBits)liveDocs).Clear(docID);
-                    pendingDeleteCount++;
-                    //System.out.println("  new del seg=" + info + " docID=" + docID + " pendingDelCount=" + pendingDeleteCount + " totDelCount=" + (info.docCount-liveDocs.count()));
-                }
-                return didDelete;
-            }
-        }
-
-        // NOTE: removes callers ref
-        public virtual void DropReaders()
-        {
-            lock (this)
-            {
-                // TODO: can we somehow use IOUtils here...?  problem is
-                // we are calling .decRef not .close)...
-                try
-                {
-                    if (reader != null)
-                    {
-                        //System.out.println("  pool.drop info=" + info + " rc=" + reader.getRefCount());
-                        try
-                        {
-                            reader.DecRef();
-                        }
-                        finally
-                        {
-                            reader = null;
-                        }
-                    }
-                }
-                finally
-                {
-                    if (mergeReader != null)
-                    {
-                        //System.out.println("  pool.drop info=" + info + " merge rc=" + mergeReader.getRefCount());
-                        try
-                        {
-                            mergeReader.DecRef();
-                        }
-                        finally
-                        {
-                            mergeReader = null;
-                        }
-                    }
-                }
-
-                DecRef();
-            }
-        }
-
-        /// <summary>
-        /// Returns a ref to a clone. NOTE: you should decRef() the reader when you're
-        /// done (i.e. do not call close()).
-        /// </summary>
-        public virtual SegmentReader GetReadOnlyClone(IOContext context)
-        {
-            lock (this)
-            {
-                if (reader == null)
-                {
-                    GetReader(context).DecRef();
-                    Debug.Assert(reader != null);
-                }
-                liveDocsShared = true;
-                if (liveDocs != null)
-                {
-                    return new SegmentReader(reader.SegmentInfo, reader, liveDocs, Info.Info.DocCount - Info.DelCount - pendingDeleteCount);
-                }
-                else
-                {
-                    Debug.Assert(reader.LiveDocs == liveDocs);
-                    reader.IncRef();
-                    return reader;
-                }
-            }
-        }
-
-        public virtual void InitWritableLiveDocs()
-        {
-            lock (this)
-            {
-                //Debug.Assert(Thread.holdsLock(Writer));
-                Debug.Assert(Info.Info.DocCount > 0);
-                //System.out.println("initWritableLivedocs seg=" + info + " liveDocs=" + liveDocs + " shared=" + shared);
-                if (liveDocsShared)
-                {
-                    // Copy on write: this means we've cloned a
-                    // SegmentReader sharing the current liveDocs
-                    // instance; must now make a private clone so we can
-                    // change it:
-                    LiveDocsFormat liveDocsFormat = Info.Info.Codec.LiveDocsFormat;
-                    if (liveDocs == null)
-                    {
-                        //System.out.println("create BV seg=" + info);
-                        liveDocs = liveDocsFormat.NewLiveDocs(Info.Info.DocCount);
-                    }
-                    else
-                    {
-                        liveDocs = liveDocsFormat.NewLiveDocs(liveDocs);
-                    }
-                    liveDocsShared = false;
-                }
-            }
-        }
-
-        public virtual IBits LiveDocs
-        {
-            get
-            {
-                lock (this)
-                {
-                    //Debug.Assert(Thread.holdsLock(Writer));
-                    return liveDocs;
-                }
-            }
-        }
-
-        public virtual IBits GetReadOnlyLiveDocs()
-        {
-            lock (this)
-            {
-                //System.out.println("getROLiveDocs seg=" + info);
-                //Debug.Assert(Thread.holdsLock(Writer));
-                liveDocsShared = true;
-                //if (liveDocs != null) {
-                //System.out.println("  liveCount=" + liveDocs.count());
-                //}
-                return liveDocs;
-            }
-        }
-
-        public virtual void DropChanges()
-        {
-            lock (this)
-            {
-                // Discard (don't save) changes when we are dropping
-                // the reader; this is used only on the sub-readers
-                // after a successful merge.  If deletes had
-                // accumulated on those sub-readers while the merge
-                // is running, by now we have carried forward those
-                // deletes onto the newly merged segment, so we can
-                // discard them on the sub-readers:
-                pendingDeleteCount = 0;
-                DropMergingUpdates();
-            }
-        }
-
-        // Commit live docs (writes new _X_N.del files) and field updates (writes new
-        // _X_N updates files) to the directory; returns true if it wrote any file
-        // and false if there were no new deletes or updates to write:
-        // TODO (DVU_RENAME) to writeDeletesAndUpdates
-        public virtual bool WriteLiveDocs(Directory dir)
-        {
-            lock (this)
-            {
-                //Debug.Assert(Thread.holdsLock(Writer));
-                //System.out.println("rld.writeLiveDocs seg=" + info + " pendingDelCount=" + pendingDeleteCount + " numericUpdates=" + numericUpdates);
-                if (pendingDeleteCount == 0)
-                {
-                    return false;
-                }
-
-                // We have new deletes
-                Debug.Assert(liveDocs.Length == Info.Info.DocCount);
-
-                // Do this so we can delete any created files on
-                // exception; this saves all codecs from having to do
-                // it:
-                TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
-
-                // We can write directly to the actual name (vs to a
-                // .tmp & renaming it) because the file is not live
-                // until segments file is written:
-                bool success = false;
-                try
-                {
-                    Codec codec = Info.Info.Codec;
-                    codec.LiveDocsFormat.WriteLiveDocs((IMutableBits)liveDocs, trackingDir, Info, pendingDeleteCount, IOContext.DEFAULT);
-                    success = true;
-                }
-                finally
-                {
-                    if (!success)
-                    {
-                        // Advance only the nextWriteDelGen so that a 2nd
-                        // attempt to write will write to a new file
-                        Info.AdvanceNextWriteDelGen();
-
-                        // Delete any partially created file(s):
-                        foreach (string fileName in trackingDir.CreatedFiles)
-                        {
-                            try
-                            {
-                                dir.DeleteFile(fileName);
-                            }
-                            catch (Exception)
-                            {
-                                // Ignore so we throw only the first exc
-                            }
-                        }
-                    }
-                }
-
-                // If we hit an exc in the line above (eg disk full)
-                // then info's delGen remains pointing to the previous
-                // (successfully written) del docs:
-                Info.AdvanceDelGen();
-                Info.DelCount = Info.DelCount + pendingDeleteCount;
-                pendingDeleteCount = 0;
-
-                return true;
-            }
-        }
-
-        // Writes field updates (new _X_N updates files) to the directory
-        public virtual void WriteFieldUpdates(Directory dir, DocValuesFieldUpdates.Container dvUpdates)
-        {
-            lock (this)
-            {
-                //Debug.Assert(Thread.holdsLock(Writer));
-                //System.out.println("rld.writeFieldUpdates: seg=" + info + " numericFieldUpdates=" + numericFieldUpdates);
-
-                Debug.Assert(dvUpdates.Any());
-
-                // Do this so we can delete any created files on
-                // exception; this saves all codecs from having to do
-                // it:
-                TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(dir);
-
-                FieldInfos fieldInfos = null;
-                bool success = false;
-                try
-                {
-                    Codec codec = Info.Info.Codec;
-
-                    // reader could be null e.g. for a just merged segment (from
-                    // IndexWriter.commitMergedDeletes).
-                    SegmentReader reader = this.reader == null ? new SegmentReader(Info, writer.Config.ReaderTermsIndexDivisor, IOContext.READ_ONCE) : this.reader;
-                    try
-                    {
-                        // clone FieldInfos so that we can update their dvGen separately from
-                        // the reader's infos and write them to a new fieldInfos_gen file
-                        FieldInfos.Builder builder = new FieldInfos.Builder(writer.globalFieldNumberMap);
-                        // cannot use builder.add(reader.getFieldInfos()) because it does not
-                        // clone FI.attributes as well as FI.dvGen
-                        foreach (FieldInfo fi in reader.FieldInfos)
-                        {
-                            FieldInfo clone = builder.Add(fi);
-                            // copy the stuff FieldInfos.Builder doesn't copy
-                            if (fi.Attributes != null)
-                            {
-                                foreach (KeyValuePair<string, string> e in fi.Attributes)
-                                {
-                                    clone.PutAttribute(e.Key, e.Value);
-                                }
-                            }
-                            clone.DocValuesGen = fi.DocValuesGen;
-                        }
-                        // create new fields or update existing ones to have NumericDV type
-                        foreach (string f in dvUpdates.numericDVUpdates.Keys)
-                        {
-                            builder.AddOrUpdate(f, NumericDocValuesField.TYPE);
-                        }
-                        // create new fields or update existing ones to have BinaryDV type
-                        foreach (string f in dvUpdates.binaryDVUpdates.Keys)
-                        {
-                            builder.AddOrUpdate(f, BinaryDocValuesField.fType);
-                        }
-
-                        fieldInfos = builder.Finish();
-                        long nextFieldInfosGen = Info.NextFieldInfosGen;
-                        string segmentSuffix = nextFieldInfosGen.ToString(CultureInfo.InvariantCulture);//Convert.ToString(nextFieldInfosGen, Character.MAX_RADIX));
-                        SegmentWriteState state = new SegmentWriteState(null, trackingDir, Info.Info, fieldInfos, writer.Config.TermIndexInterval, null, IOContext.DEFAULT, segmentSuffix);
-                        DocValuesFormat docValuesFormat = codec.DocValuesFormat;
-                        DocValuesConsumer fieldsConsumer = docValuesFormat.FieldsConsumer(state);
-                        bool fieldsConsumerSuccess = false;
-                        try
-                        {
-                            //          System.out.println("[" + Thread.currentThread().getName() + "] RLD.writeFieldUpdates: applying numeric updates; seg=" + info + " updates=" + numericFieldUpdates);
-                            foreach (KeyValuePair<string, NumericDocValuesFieldUpdates> e in dvUpdates.numericDVUpdates)
-                            {
-                                string field = e.Key;
-                                NumericDocValuesFieldUpdates fieldUpdates = e.Value;
-                                FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
-                                Debug.Assert(fieldInfo != null);
-
-                                fieldInfo.DocValuesGen = nextFieldInfosGen;
-                                // write the numeric updates to a new gen'd docvalues file
-                                fieldsConsumer.AddNumericField(fieldInfo, GetInt64Enumerable(reader, field, fieldUpdates));
-                            }
-
-                            //        System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " updates=" + dvUpdates.binaryDVUpdates);
-                            foreach (KeyValuePair<string, BinaryDocValuesFieldUpdates> e in dvUpdates.binaryDVUpdates)
-                            {
-                                string field = e.Key;
-                                BinaryDocValuesFieldUpdates dvFieldUpdates = e.Value;
-                                FieldInfo fieldInfo = fieldInfos.FieldInfo(field);
-                                Debug.Assert(fieldInfo != null);
-
-                                //          System.out.println("[" + Thread.currentThread().getName() + "] RAU.writeFieldUpdates: applying binary updates; seg=" + info + " f=" + dvFieldUpdates + ", updates=" + dvFieldUpdates);
-
-                                fieldInfo.DocValuesGen = nextFieldInfosGen;
-                                // write the binary updates to a new gen'd docvalues file
-                                fieldsConsumer.AddBinaryField(fieldInfo, GetBytesRefEnumerable(reader, field, dvFieldUpdates));
-                            }
-
-                            codec.FieldInfosFormat.FieldInfosWriter.Write(trackingDir, Info.Info.Name, segmentSuffix, fieldInfos, IOContext.DEFAULT);
-                            fieldsConsumerSuccess = true;
-                        }
-                        finally
-                        {
-                            if (fieldsConsumerSuccess)
-                            {
-                                fieldsConsumer.Dispose();
-                            }
-                            else
-                            {
-                                IOUtils.CloseWhileHandlingException(fieldsConsumer);
-                            }
-                        }
-                    }
-                    finally
-                    {
-                        if (reader != this.reader)
-                        {
-                            //          System.out.println("[" + Thread.currentThread().getName() + "] RLD.writeLiveDocs: closeReader " + reader);
-                            reader.Dispose();
-                        }
-                    }
-
-                    success = true;
-                }
-                finally
-                {
-                    if (!success)
-                    {
-                        // Advance only the nextWriteFieldInfosGen so that a 2nd
-                        // attempt to write will write to a new file
-                        Info.AdvanceNextWriteFieldInfosGen();
-
-                        // Delete any partially created file(s):
-                        foreach (string fileName in trackingDir.CreatedFiles)
-                        {
-                            try
-                            {
-                                dir.DeleteFile(fileName);
-                            }
-                            catch (Exception)
-                            {
-                                // Ignore so we throw only the first exc
-                            }
-                        }
-                    }
-                }
-
-                Info.AdvanceFieldInfosGen();
-                // copy all the updates to mergingUpdates, so they can later be applied to the merged segment
-                if (isMerging)
-                {
-                    foreach (KeyValuePair<string, NumericDocValuesFieldUpdates> e in dvUpdates.numericDVUpdates)
-                    {
-                        DocValuesFieldUpdates updates;
-                        if (!mergingDVUpdates.TryGetValue(e.Key, out updates))
-                        {
-                            mergingDVUpdates[e.Key] = e.Value;
-                        }
-                        else
-                        {
-                            updates.Merge(e.Value);
-                        }
-                    }
-                    foreach (KeyValuePair<string, BinaryDocValuesFieldUpdates> e in dvUpdates.binaryDVUpdates)
-                    {
-                        DocValuesFieldUpdates updates;
-                        if (!mergingDVUpdates.TryGetValue(e.Key, out updates))
-                        {
-                            mergingDVUpdates[e.Key] = e.Value;
-                        }
-                        else
-                        {
-                            updates.Merge(e.Value);
-                        }
-                    }
-                }
-
-                // create a new map, keeping only the gens that are in use
-                IDictionary<long, ISet<string>> genUpdatesFiles = Info.UpdatesFiles;
-                IDictionary<long, ISet<string>> newGenUpdatesFiles = new Dictionary<long, ISet<string>>();
-                long fieldInfosGen = Info.FieldInfosGen;
-                foreach (FieldInfo fi in fieldInfos)
-                {
-                    long dvGen = fi.DocValuesGen;
-                    if (dvGen != -1 && !newGenUpdatesFiles.ContainsKey(dvGen))
-                    {
-                        if (dvGen == fieldInfosGen)
-                        {
-                            newGenUpdatesFiles[fieldInfosGen] = trackingDir.CreatedFiles;
-                        }
-                        else
-                        {
-                            newGenUpdatesFiles[dvGen] = genUpdatesFiles[dvGen];
-                        }
-                    }
-                }
-
-                Info.SetGenUpdatesFiles(newGenUpdatesFiles);
-
-                // wrote new files, should checkpoint()
-                writer.Checkpoint();
-
-                // if there is a reader open, reopen it to reflect the updates
-                if (reader != null)
-                {
-                    SegmentReader newReader = new SegmentReader(Info, reader, liveDocs, Info.Info.DocCount - Info.DelCount - pendingDeleteCount);
-                    bool reopened = false;
-                    try
-                    {
-                        reader.DecRef();
-                        reader = newReader;
-                        reopened = true;
-                    }
-                    finally
-                    {
-                        if (!reopened)
-                        {
-                            newReader.DecRef();
-                        }
-                    }
-                }
-            }
-        }
-
-        /// <summary>
-        /// NOTE: This was getLongEnumerable() in Lucene
-        /// </summary>
-        private IEnumerable<long?> GetInt64Enumerable(SegmentReader reader, string field, NumericDocValuesFieldUpdates fieldUpdates)
-        {
-            int maxDoc = reader.MaxDoc;
-            IBits DocsWithField = reader.GetDocsWithField(field);
-            NumericDocValues currentValues = reader.GetNumericDocValues(field);
-            NumericDocValuesFieldUpdates.Iterator iter = (NumericDocValuesFieldUpdates.Iterator)fieldUpdates.GetIterator();
-            int updateDoc = iter.NextDoc();
-
-            for (int curDoc = 0; curDoc < maxDoc; ++curDoc)
-            {
-                if (curDoc == updateDoc) //document has an updated value
-                {
-                    long? value = (long?)iter.Value; // either null or updated
-                    updateDoc = iter.NextDoc(); //prepare for next round
-                    yield return value;
-                }
-                else
-                {   // no update for this document
-                    if (currentValues != null && DocsWithField.Get(curDoc))
-                    {
-                        // only read the current value if the document had a value before
-                        yield return currentValues.Get(curDoc);
-                    }
-                    else
-                    {
-                        yield return null;
-                    }
-                }
-            }
-        }
-
-        private IEnumerable<BytesRef> GetBytesRefEnumerable(SegmentReader reader, string field, BinaryDocValuesFieldUpdates fieldUpdates)
-        {
-            BinaryDocValues currentValues = reader.GetBinaryDocValues(field);
-            IBits DocsWithField = reader.GetDocsWithField(field);
-            int maxDoc = reader.MaxDoc;
-            var iter = (BinaryDocValuesFieldUpdates.Iterator)fieldUpdates.GetIterator();
-            int updateDoc = iter.NextDoc();
-
-            for (int curDoc = 0; curDoc < maxDoc; ++curDoc)
-            {
-                if (curDoc == updateDoc) //document has an updated value
-                {
-                    BytesRef value = (BytesRef)iter.Value; // either null or updated
-                    updateDoc = iter.NextDoc(); //prepare for next round
-                    yield return value;
-                }
-                else
-                {   // no update for this document
-                    if (currentValues != null && DocsWithField.Get(curDoc))
-                    {
-                        var scratch = new BytesRef();
-                        // only read the current value if the document had a value before
-                        currentValues.Get(curDoc, scratch);
-                        yield return scratch;
-                    }
-                    else
-                    {
-                        yield return null;
-                    }
-                }
-            }
-        }
-
-        /*
-	  private class IterableAnonymousInnerClassHelper : IEnumerable<Number>
-	  {
-		  private readonly ReadersAndUpdates OuterInstance;
-
-		  private Lucene.Net.Index.SegmentReader Reader;
-		  private string Field;
-		  private Lucene.Net.Index.NumericDocValuesFieldUpdates FieldUpdates;
-
-		  public IterableAnonymousInnerClassHelper(ReadersAndUpdates outerInstance, Lucene.Net.Index.SegmentReader reader, string field, Lucene.Net.Index.NumericDocValuesFieldUpdates fieldUpdates)
-		  {
-			  this.OuterInstance = outerInstance;
-			  this.Reader = reader;
-			  this.Field = field;
-			  this.FieldUpdates = fieldUpdates;
-			  currentValues = reader.GetNumericDocValues(field);
-			  docsWithField = reader.GetDocsWithField(field);
-			  maxDoc = reader.MaxDoc;
-			  updatesIter = fieldUpdates.Iterator();
-		  }
-
-		  internal readonly NumericDocValues currentValues;
-		  internal readonly Bits docsWithField;
-		  internal readonly int maxDoc;
-		  internal readonly NumericDocValuesFieldUpdates.Iterator updatesIter;
-		  public virtual IEnumerator<Number> GetEnumerator()
-		  {
-			updatesIter.Reset();
-			return new IteratorAnonymousInnerClassHelper(this);
-		  }
-
-		  private class IteratorAnonymousInnerClassHelper : IEnumerator<Number>
-		  {
-			  private readonly IterableAnonymousInnerClassHelper OuterInstance;
-
-			  public IteratorAnonymousInnerClassHelper(IterableAnonymousInnerClassHelper outerInstance)
-			  {
-                  this.OuterInstance = outerInstance;
-				  curDoc = -1;
-				  updateDoc = updatesIter.NextDoc();
-			  }
-
-			  internal int curDoc;
-			  internal int updateDoc;
-
-			  public virtual bool HasNext()
-			  {
-				return curDoc < maxDoc - 1;
-			  }
-
-			  public virtual Number Next()
-			  {
-				if (++curDoc >= maxDoc)
-				{
-				  throw new NoSuchElementException("no more documents to return values for");
-				}
-				if (curDoc == updateDoc) // this document has an updated value
-				{
-				  long? value = updatesIter.value(); // either null (unset value) or updated value
-				  updateDoc = updatesIter.nextDoc(); // prepare for next round
-				  return value;
-				}
-				else
-				{
-				  // no update for this document
-				  Debug.Assert(curDoc < updateDoc);
-				  if (currentValues != null && docsWithField.Get(curDoc))
-				  {
-					// only read the current value if the document had a value before
-					return currentValues.Get(curDoc);
-				  }
-				  else
-				  {
-					return null;
-				  }
-				}
-			  }
-
-			  public virtual void Remove()
-			  {
-				throw new System.NotSupportedException("this iterator does not support removing elements");
-			  }
-		  }
-	  }*/
-        /*
-	  private class IterableAnonymousInnerClassHelper2 : IEnumerable<BytesRef>
-	  {
-		  private readonly ReadersAndUpdates OuterInstance;
-
-		  private Lucene.Net.Index.SegmentReader Reader;
-		  private string Field;
-		  private Lucene.Net.Index.BinaryDocValuesFieldUpdates DvFieldUpdates;
-
-		  public IterableAnonymousInnerClassHelper2(ReadersAndUpdates outerInstance, Lucene.Net.Index.SegmentReader reader, string field, Lucene.Net.Index.BinaryDocValuesFieldUpdates dvFieldUpdates)
-		  {
-			  this.OuterInstance = outerInstance;
-			  this.Reader = reader;
-			  this.Field = field;
-			  this.DvFieldUpdates = dvFieldUpdates;
-			  currentValues = reader.GetBinaryDocValues(field);
-			  docsWithField = reader.GetDocsWithField(field);
-			  maxDoc = reader.MaxDoc;
-			  updatesIter = dvFieldUpdates.Iterator();
-		  }
-
-		  internal readonly BinaryDocValues currentValues;
-		  internal readonly Bits docsWithField;
-		  internal readonly int maxDoc;
-		  internal readonly BinaryDocValuesFieldUpdates.Iterator updatesIter;
-		  public virtual IEnumerator<BytesRef> GetEnumerator()
-		  {
-			updatesIter.Reset();
-			return new IteratorAnonymousInnerClassHelper2(this);
-		  }
-
-		  private class IteratorAnonymousInnerClassHelper2 : IEnumerator<BytesRef>
-		  {
-			  private readonly IterableAnonymousInnerClassHelper2 OuterInstance;
-
-			  public IteratorAnonymousInnerClassHelper2(IterableAnonymousInnerClassHelper2 outerInstance)
-			  {
-                  this.OuterInstance = outerInstance;
-				  curDoc = -1;
-				  updateDoc = updatesIter.nextDoc();
-				  scratch = new BytesRef();
-			  }
-
-			  internal int curDoc;
-			  internal int updateDoc;
-			  internal BytesRef scratch;
-
-			  public virtual bool HasNext()
-			  {
-				return curDoc < maxDoc - 1;
-			  }
-
-			  public virtual BytesRef Next()
-			  {
-				if (++curDoc >= maxDoc)
-				{
-				  throw new NoSuchElementException("no more documents to return values for");
-				}
-				if (curDoc == updateDoc) // this document has an updated value
-				{
-				  BytesRef value = updatesIter.value(); // either null (unset value) or updated value
-				  updateDoc = updatesIter.nextDoc(); // prepare for next round
-				  return value;
-				}
-				else
-				{
-				  // no update for this document
-				  Debug.Assert(curDoc < updateDoc);
-				  if (currentValues != null && docsWithField.get(curDoc))
-				  {
-					// only read the current value if the document had a value before
-					currentValues.get(curDoc, scratch);
-					return scratch;
-				  }
-				  else
-				  {
-					return null;
-				  }
-				}
-			  }
-
-			  public virtual void Remove()
-			  {
-				throw new System.NotSupportedException("this iterator does not support removing elements");
-			  }
-		  }
-	  }*/
-
-        /// <summary>
-        /// Returns a reader for merge. this method applies field updates if there are
-        /// any and marks that this segment is currently merging.
-        /// </summary>
-        internal virtual SegmentReader GetReaderForMerge(IOContext context)
-        {
-            lock (this)
-            {
-                //Debug.Assert(Thread.holdsLock(Writer));
-                // must execute these two statements as atomic operation, otherwise we
-                // could lose updates if e.g. another thread calls writeFieldUpdates in
-                // between, or the updates are applied to the obtained reader, but then
-                // re-applied in IW.commitMergedDeletes (unnecessary work and potential
-                // bugs).
-                isMerging = true;
-                return GetReader(context);
-            }
-        }
-
-        /// <summary>
-        /// Drops all merging updates. Called from IndexWriter after this segment
-        /// finished merging (whether successfully or not).
-        /// </summary>
-        public virtual void DropMergingUpdates()
-        {
-            lock (this)
-            {
-                mergingDVUpdates.Clear();
-                isMerging = false;
-            }
-        }
-
-        /// <summary>
-        /// Returns updates that came in while this segment was merging. </summary>
-        public virtual IDictionary<string, DocValuesFieldUpdates> MergingFieldUpdates
-        {
-            get
-            {
-                lock (this)
-                {
-                    return mergingDVUpdates;
-                }
-            }
-        }
-
-        public override string ToString()
-        {
-            StringBuilder sb = new StringBuilder();
-            sb.Append("ReadersAndLiveDocs(seg=").Append(Info);
-            sb.Append(" pendingDeleteCount=").Append(pendingDeleteCount);
-            sb.Append(" liveDocsShared=").Append(liveDocsShared);
-            return sb.ToString();
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Index/SegmentCommitInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/SegmentCommitInfo.cs b/src/Lucene.Net.Core/Index/SegmentCommitInfo.cs
deleted file mode 100644
index b74e52e..0000000
--- a/src/Lucene.Net.Core/Index/SegmentCommitInfo.cs
+++ /dev/null
@@ -1,338 +0,0 @@
-using Lucene.Net.Support;
-using System;
-using System.Collections.Generic;
-
-namespace Lucene.Net.Index
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Directory = Lucene.Net.Store.Directory;
-
-    /// <summary>
-    /// Embeds a [read-only] SegmentInfo and adds per-commit
-    ///  fields.
-    ///
-    ///  @lucene.experimental
-    /// </summary>
-#if FEATURE_SERIALIZABLE
-    [Serializable]
-#endif
-    public class SegmentCommitInfo
-    {
-        /// <summary>
-        /// The <seealso cref="SegmentInfo"/> that we wrap. </summary>
-        public SegmentInfo Info { get; private set; }
-
-        // How many deleted docs in the segment:
-        private int delCount;
-
-        // Generation number of the live docs file (-1 if there
-        // are no deletes yet):
-        private long delGen;
-
-        // Normally 1+delGen, unless an exception was hit on last
-        // attempt to write:
-        private long nextWriteDelGen;
-
-        // Generation number of the FieldInfos (-1 if there are no updates)
-        private long fieldInfosGen;
-
-        // Normally 1 + fieldInfosGen, unless an exception was hit on last attempt to
-        // write
-        private long nextWriteFieldInfosGen;
-
-        // Track the per-generation updates files
-        private readonly IDictionary<long, ISet<string>> genUpdatesFiles = new Dictionary<long, ISet<string>>();
-
-        private long sizeInBytes = -1; // LUCENENET NOTE: This was volatile in the original, but long cannot be volatile in .NET
-
-        /// <summary>
-        /// Sole constructor.
-        /// </summary>
-        /// <param name="info">
-        ///          <seealso cref="SegmentInfo"/> that we wrap </param>
-        /// <param name="delCount">
-        ///          number of deleted documents in this segment </param>
-        /// <param name="delGen">
-        ///          deletion generation number (used to name deletion files) </param>
-        /// <param name="fieldInfosGen">
-        ///          FieldInfos generation number (used to name field-infos files)
-        ///  </param>
-        public SegmentCommitInfo(SegmentInfo info, int delCount, long delGen, long fieldInfosGen)
-        {
-            this.Info = info;
-            this.delCount = delCount;
-            this.delGen = delGen;
-            if (delGen == -1)
-            {
-                nextWriteDelGen = 1;
-            }
-            else
-            {
-                nextWriteDelGen = delGen + 1;
-            }
-
-            this.fieldInfosGen = fieldInfosGen;
-            if (fieldInfosGen == -1)
-            {
-                nextWriteFieldInfosGen = 1;
-            }
-            else
-            {
-                nextWriteFieldInfosGen = fieldInfosGen + 1;
-            }
-        }
-
-        /// <summary>
-        /// Returns the per generation updates files. </summary>
-        public virtual IDictionary<long, ISet<string>> UpdatesFiles
-        {
-            get
-            {
-                return Collections.UnmodifiableMap(genUpdatesFiles);
-            }
-        }
-
-        /// <summary>
-        /// Sets the updates file names per generation. Does not deep clone the map. </summary>
-        public virtual void SetGenUpdatesFiles(IDictionary<long, ISet<string>> genUpdatesFiles)
-        {
-            this.genUpdatesFiles.Clear();
-            this.genUpdatesFiles.PutAll(genUpdatesFiles);
-        }
-
-        /// <summary>
-        /// Called when we succeed in writing deletes </summary>
-        internal virtual void AdvanceDelGen()
-        {
-            delGen = nextWriteDelGen;
-            nextWriteDelGen = delGen + 1;
-            sizeInBytes = -1;
-        }
-
-        /// <summary>
-        /// Called if there was an exception while writing
-        ///  deletes, so that we don't try to write to the same
-        ///  file more than once.
-        /// </summary>
-        internal virtual void AdvanceNextWriteDelGen()
-        {
-            nextWriteDelGen++;
-        }
-
-        /// <summary>
-        /// Called when we succeed in writing a new FieldInfos generation. </summary>
-        internal virtual void AdvanceFieldInfosGen()
-        {
-            fieldInfosGen = nextWriteFieldInfosGen;
-            nextWriteFieldInfosGen = fieldInfosGen + 1;
-            sizeInBytes = -1;
-        }
-
-        /// <summary>
-        /// Called if there was an exception while writing a new generation of
-        /// FieldInfos, so that we don't try to write to the same file more than once.
-        /// </summary>
-        internal virtual void AdvanceNextWriteFieldInfosGen()
-        {
-            nextWriteFieldInfosGen++;
-        }
-
-        /// <summary>
-        /// Returns total size in bytes of all files for this
-        ///  segment.
-        /// <p><b>NOTE:</b> this value is not correct for 3.0 segments
-        /// that have shared docstores. To get the correct value, upgrade!
-        /// </summary>
-        public virtual long GetSizeInBytes()
-        {
-            if (sizeInBytes == -1)
-            {
-                long sum = 0;
-                foreach (string fileName in Files())
-                {
-                    sum += Info.Dir.FileLength(fileName);
-                }
-                sizeInBytes = sum;
-            }
-
-            return sizeInBytes;
-        }
-
-        /// <summary>
-        /// Returns all files in use by this segment. </summary>
-        public virtual ICollection<string> Files()
-        {
-            // Start from the wrapped info's files:
-            ISet<string> files = new HashSet<string>(Info.GetFiles());
-
-            // TODO we could rely on TrackingDir.getCreatedFiles() (like we do for
-            // updates) and then maybe even be able to remove LiveDocsFormat.files().
-
-            // Must separately add any live docs files:
-            Info.Codec.LiveDocsFormat.Files(this, files);
-
-            // Must separately add any field updates files
-            foreach (ISet<string> updateFiles in genUpdatesFiles.Values)
-            {
-                Collections.AddAll(files, updateFiles);
-            }
-
-            return files;
-        }
-
-        // NOTE: only used in-RAM by IW to track buffered deletes;
-        // this is never written to/read from the Directory
-        private long bufferedDeletesGen;
-
-        internal virtual long BufferedDeletesGen
-        {
-            get
-            {
-                return bufferedDeletesGen;
-            }
-        }
-
-        internal void SetBufferedDeletesGen(long value)
-        {
-            bufferedDeletesGen = value;
-            sizeInBytes = -1;
-        }
-
-        /// <summary>
-        /// Returns true if there are any deletions for the
-        /// segment at this commit.
-        /// </summary>
-        public virtual bool HasDeletions
-        {
-            get { return delGen != -1; }
-        }
-
-        /// <summary>
-        /// Returns true if there are any field updates for the segment in this commit. </summary>
-        public virtual bool HasFieldUpdates
-        {
-            get { return fieldInfosGen != -1; }
-        }
-
-        /// <summary>
-        /// Returns the next available generation number of the FieldInfos files. </summary>
-        public virtual long NextFieldInfosGen
-        {
-            get
-            {
-                return nextWriteFieldInfosGen;
-            }
-        }
-
-        /// <summary>
-        /// Returns the generation number of the field infos file or -1 if there are no
-        /// field updates yet.
-        /// </summary>
-        public virtual long FieldInfosGen
-        {
-            get
-            {
-                return fieldInfosGen;
-            }
-        }
-
-        /// <summary>
-        /// Returns the next available generation number
-        /// of the live docs file.
-        /// </summary>
-        public virtual long NextDelGen
-        {
-            get
-            {
-                return nextWriteDelGen;
-            }
-        }
-
-        /// <summary>
-        /// Returns generation number of the live docs file
-        /// or -1 if there are no deletes yet.
-        /// </summary>
-        public virtual long DelGen
-        {
-            get
-            {
-                return delGen;
-            }
-        }
-
-        /// <summary>
-        /// Returns the number of deleted docs in the segment.
-        /// </summary>
-        public virtual int DelCount
-        {
-            get
-            {
-                return delCount;
-            }
-            internal set
-            {
-                if (value < 0 || value > Info.DocCount)
-                {
-                    throw new System.ArgumentException("invalid delCount=" + value + " (docCount=" + Info.DocCount + ")");
-                }
-                this.delCount = value;
-            }
-        }
-
-        /// <summary>
-        /// Returns a description of this segment. </summary>
-        public virtual string ToString(Directory dir, int pendingDelCount)
-        {
-            string s = Info.ToString(dir, delCount + pendingDelCount);
-            if (delGen != -1)
-            {
-                s += ":delGen=" + delGen;
-            }
-            if (fieldInfosGen != -1)
-            {
-                s += ":fieldInfosGen=" + fieldInfosGen;
-            }
-            return s;
-        }
-
-        public override string ToString()
-        {
-            return ToString(Info.Dir, 0);
-        }
-
-        public virtual object Clone()
-        {
-            SegmentCommitInfo other = new SegmentCommitInfo(Info, delCount, delGen, fieldInfosGen);
-            // Not clear that we need to carry over nextWriteDelGen
-            // (i.e. do we ever clone after a failed write and
-            // before the next successful write?), but just do it to
-            // be safe:
-            other.nextWriteDelGen = nextWriteDelGen;
-            other.nextWriteFieldInfosGen = nextWriteFieldInfosGen;
-
-            // deep clone
-            foreach (KeyValuePair<long, ISet<string>> e in genUpdatesFiles)
-            {
-                other.genUpdatesFiles[e.Key] = new HashSet<string>(e.Value);
-            }
-
-            return other;
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Index/SegmentCoreReaders.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/SegmentCoreReaders.cs b/src/Lucene.Net.Core/Index/SegmentCoreReaders.cs
deleted file mode 100644
index ef9e870..0000000
--- a/src/Lucene.Net.Core/Index/SegmentCoreReaders.cs
+++ /dev/null
@@ -1,315 +0,0 @@
-using Lucene.Net.Codecs;
-using Lucene.Net.Support;
-using Lucene.Net.Util;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Reflection;
-
-namespace Lucene.Net.Index
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Codec = Lucene.Net.Codecs.Codec;
-    using CompoundFileDirectory = Lucene.Net.Store.CompoundFileDirectory;
-    using Directory = Lucene.Net.Store.Directory;
-    using DocValuesProducer = Lucene.Net.Codecs.DocValuesProducer;
-    using FieldsProducer = Lucene.Net.Codecs.FieldsProducer;
-    using ICoreClosedListener = Lucene.Net.Index.SegmentReader.ICoreClosedListener;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-    using PostingsFormat = Lucene.Net.Codecs.PostingsFormat;
-    using StoredFieldsReader = Lucene.Net.Codecs.StoredFieldsReader;
-    using TermVectorsReader = Lucene.Net.Codecs.TermVectorsReader;
-
-    /// <summary>
-    /// Holds core readers that are shared (unchanged) when
-    /// SegmentReader is cloned or reopened
-    /// </summary>
-#if FEATURE_SERIALIZABLE
-    [Serializable]
-#endif
-    internal sealed class SegmentCoreReaders
-    {
-        // Counts how many other readers share the core objects
-        // (freqStream, proxStream, tis, etc.) of this reader;
-        // when coreRef drops to 0, these core objects may be
-        // closed.  A given instance of SegmentReader may be
-        // closed, even though it shares core objects with other
-        // SegmentReaders:
-        private readonly AtomicInt32 @ref = new AtomicInt32(1);
-
-        internal readonly FieldsProducer fields;
-        internal readonly DocValuesProducer normsProducer;
-
-        internal readonly int termsIndexDivisor;
-
-        internal readonly StoredFieldsReader fieldsReaderOrig;
-        internal readonly TermVectorsReader termVectorsReaderOrig;
-        internal readonly CompoundFileDirectory cfsReader;
-
-        // TODO: make a single thread local w/ a
-        // Thingy class holding fieldsReader, termVectorsReader,
-        // normsProducer
-
-        internal readonly DisposableThreadLocal<StoredFieldsReader> fieldsReaderLocal;
-
-        private class AnonymousFieldsReaderLocal : DisposableThreadLocal<StoredFieldsReader>
-        {
-            private readonly SegmentCoreReaders outerInstance;
-
-            public AnonymousFieldsReaderLocal(SegmentCoreReaders outerInstance)
-            {
-                this.outerInstance = outerInstance;
-            }
-
-            protected internal override StoredFieldsReader InitialValue()
-            {
-                return (StoredFieldsReader)outerInstance.fieldsReaderOrig.Clone();
-            }
-        }
-
-        internal readonly DisposableThreadLocal<TermVectorsReader> termVectorsLocal;
-
-        private class AnonymousTermVectorsLocal : DisposableThreadLocal<TermVectorsReader>
-        {
-            private readonly SegmentCoreReaders outerInstance;
-
-            public AnonymousTermVectorsLocal(SegmentCoreReaders outerInstance)
-            {
-                this.outerInstance = outerInstance;
-            }
-
-            protected internal override TermVectorsReader InitialValue()
-            {
-                return (outerInstance.termVectorsReaderOrig == null) ? null : (TermVectorsReader)outerInstance.termVectorsReaderOrig.Clone();
-            }
-        }
-
-        internal readonly DisposableThreadLocal<IDictionary<string, object>> normsLocal = new DisposableThreadLocalAnonymousInnerClassHelper3();
-
-        private class DisposableThreadLocalAnonymousInnerClassHelper3 : DisposableThreadLocal<IDictionary<string, object>>
-        {
-            public DisposableThreadLocalAnonymousInnerClassHelper3()
-            {
-            }
-
-            protected internal override IDictionary<string, object> InitialValue()
-            {
-                return new Dictionary<string, object>();
-            }
-        }
-
-        private readonly ISet<ICoreClosedListener> coreClosedListeners = new ConcurrentHashSet<ICoreClosedListener>(new IdentityComparer<ICoreClosedListener>());
-
-        internal SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentCommitInfo si, IOContext context, int termsIndexDivisor)
-        {
-            fieldsReaderLocal = new AnonymousFieldsReaderLocal(this);
-            termVectorsLocal = new AnonymousTermVectorsLocal(this);
-
-            if (termsIndexDivisor == 0)
-            {
-                throw new System.ArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)");
-            }
-
-            Codec codec = si.Info.Codec;
-            Directory cfsDir; // confusing name: if (cfs) its the cfsdir, otherwise its the segment's directory.
-
-            bool success = false;
-
-            try
-            {
-                if (si.Info.UseCompoundFile)
-                {
-                    cfsDir = cfsReader = new CompoundFileDirectory(dir, IndexFileNames.SegmentFileName(si.Info.Name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context, false);
-                }
-                else
-                {
-                    cfsReader = null;
-                    cfsDir = dir;
-                }
-
-                FieldInfos fieldInfos = owner.FieldInfos;
-
-                this.termsIndexDivisor = termsIndexDivisor;
-                PostingsFormat format = codec.PostingsFormat;
-                SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si.Info, fieldInfos, context, termsIndexDivisor);
-                // Ask codec for its Fields
-                fields = format.FieldsProducer(segmentReadState);
-                Debug.Assert(fields != null);
-                // ask codec for its Norms:
-                // TODO: since we don't write any norms file if there are no norms,
-                // kinda jaky to assume the codec handles the case of no norms file at all gracefully?!
-
-                if (fieldInfos.HasNorms)
-                {
-                    normsProducer = codec.NormsFormat.NormsProducer(segmentReadState);
-                    Debug.Assert(normsProducer != null);
-                }
-                else
-                {
-                    normsProducer = null;
-                }
-
-                // LUCENENET TODO: EXCEPTIONS Not sure why this catch block is swallowing AccessViolationException, 
-                // because it didn't exist in Lucene. Is it really needed? AVE is for protected memory...could
-                // this be needed because we are using unchecked??
-
-#if !NETSTANDARD
-                try
-                {
-#endif
-                    fieldsReaderOrig = si.Info.Codec.StoredFieldsFormat.FieldsReader(cfsDir, si.Info, fieldInfos, context);
-#if !NETSTANDARD
-                }
-#pragma warning disable 168
-                catch (System.AccessViolationException ave)
-#pragma warning restore 168
-                {
-                }
-#endif
-
-                if (fieldInfos.HasVectors) // open term vector files only as needed
-                {
-                    termVectorsReaderOrig = si.Info.Codec.TermVectorsFormat.VectorsReader(cfsDir, si.Info, fieldInfos, context);
-                }
-                else
-                {
-                    termVectorsReaderOrig = null;
-                }
-
-                success = true;
-            }
-            finally
-            {
-                if (!success)
-                {
-                    DecRef();
-                }
-            }
-        }
-
-        internal int RefCount
-        {
-            get
-            {
-                return @ref.Get();
-            }
-        }
-
-        internal void IncRef()
-        {
-            int count;
-            while ((count = @ref.Get()) > 0)
-            {
-                if (@ref.CompareAndSet(count, count + 1))
-                {
-                    return;
-                }
-            }
-            throw new ObjectDisposedException(this.GetType().GetTypeInfo().FullName, "SegmentCoreReaders is already closed");
-        }
-
-        internal NumericDocValues GetNormValues(FieldInfo fi)
-        {
-            Debug.Assert(normsProducer != null);
-
-            IDictionary<string, object> normFields = normsLocal.Get();
-
-            object ret;
-            normFields.TryGetValue(fi.Name, out ret);
-            var norms = ret as NumericDocValues;
-            if (norms == null)
-            {
-                norms = normsProducer.GetNumeric(fi);
-                normFields[fi.Name] = norms;
-            }
-
-            return norms;
-        }
-
-        internal void DecRef()
-        {
-            if (@ref.DecrementAndGet() == 0)
-            {
-                Exception th = null;
-                try
-                {
-                    IOUtils.Close(termVectorsLocal, fieldsReaderLocal, normsLocal, fields, termVectorsReaderOrig, fieldsReaderOrig, cfsReader, normsProducer);
-                }
-                catch (Exception throwable)
-                {
-                    th = throwable;
-                }
-                finally
-                {
-                    NotifyCoreClosedListeners(th);
-                }
-            }
-        }
-
-        private void NotifyCoreClosedListeners(Exception th)
-        {
-            lock (coreClosedListeners)
-            {
-                foreach (ICoreClosedListener listener in coreClosedListeners)
-                {
-                    // SegmentReader uses our instance as its
-                    // coreCacheKey:
-                    try
-                    {
-                        listener.OnClose(this);
-                    }
-                    catch (Exception t)
-                    {
-                        
-                        if (th == null)
-                        {
-                            th = t;
-                        }
-                        else
-                        {
-                            th.AddSuppressed(t);
-                        }
-                    }
-                }
-                IOUtils.ReThrowUnchecked(th);
-            }
-        }
-
-        internal void AddCoreClosedListener(ICoreClosedListener listener)
-        {
-            coreClosedListeners.Add(listener);
-        }
-
-        internal void RemoveCoreClosedListener(ICoreClosedListener listener)
-        {
-            coreClosedListeners.Remove(listener);
-        }
-
-        /// <summary>
-        /// Returns approximate RAM bytes used </summary>
-        public long RamBytesUsed()
-        {
-            return ((normsProducer != null) ? normsProducer.RamBytesUsed() : 0) + 
-                ((fields != null) ? fields.RamBytesUsed() : 0) + 
-                ((fieldsReaderOrig != null) ? fieldsReaderOrig.RamBytesUsed() : 0) + 
-                ((termVectorsReaderOrig != null) ? termVectorsReaderOrig.RamBytesUsed() : 0);
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Index/SegmentDocValues.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/SegmentDocValues.cs b/src/Lucene.Net.Core/Index/SegmentDocValues.cs
deleted file mode 100644
index d7992a5..0000000
--- a/src/Lucene.Net.Core/Index/SegmentDocValues.cs
+++ /dev/null
@@ -1,137 +0,0 @@
-using Lucene.Net.Util;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Globalization;
-using System.Linq;
-
-namespace Lucene.Net.Index
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Directory = Lucene.Net.Store.Directory;
-    using DocValuesFormat = Lucene.Net.Codecs.DocValuesFormat;
-    using DocValuesProducer = Lucene.Net.Codecs.DocValuesProducer;
-    using IOContext = Lucene.Net.Store.IOContext;
-    using IOUtils = Lucene.Net.Util.IOUtils;
-
-    /// <summary>
-    /// Manages the <seealso cref="DocValuesProducer"/> held by <seealso cref="SegmentReader"/> and
-    /// keeps track of their reference counting.
-    /// </summary>
-#if FEATURE_SERIALIZABLE
-    [Serializable]
-#endif
-    internal sealed class SegmentDocValues
-    {
-        private readonly IDictionary<long?, RefCount<DocValuesProducer>> genDVProducers = new Dictionary<long?, RefCount<DocValuesProducer>>();
-
-        private RefCount<DocValuesProducer> NewDocValuesProducer(SegmentCommitInfo si, IOContext context, Directory dir, DocValuesFormat dvFormat, long? gen, IList<FieldInfo> infos, int termsIndexDivisor)
-        {
-            Directory dvDir = dir;
-            string segmentSuffix = "";
-            if ((long)gen != -1)
-            {
-                dvDir = si.Info.Dir; // gen'd files are written outside CFS, so use SegInfo directory
-                segmentSuffix = ((long)gen).ToString(CultureInfo.InvariantCulture);//Convert.ToString((long)gen, Character.MAX_RADIX);
-            }
-
-            // set SegmentReadState to list only the fields that are relevant to that gen
-            SegmentReadState srs = new SegmentReadState(dvDir, si.Info, new FieldInfos(infos.ToArray()), context, termsIndexDivisor, segmentSuffix);
-            return new RefCountHelper(this, dvFormat.FieldsProducer(srs), gen);
-        }
-
-#if FEATURE_SERIALIZABLE
-        [Serializable]
-#endif
-        private class RefCountHelper : RefCount<DocValuesProducer>
-        {
-            private readonly SegmentDocValues outerInstance;
-            private long? gen;
-
-            public RefCountHelper(SegmentDocValues outerInstance, DocValuesProducer fieldsProducer, long? gen)
-                : base(fieldsProducer)
-            {
-                this.outerInstance = outerInstance;
-                this.gen = gen;
-            }
-
-            protected override void Release()
-            {
-                m_object.Dispose();
-                lock (outerInstance)
-                {
-                    outerInstance.genDVProducers.Remove(gen);
-                }
-            }
-        }
-
-        /// <summary>
-        /// Returns the <seealso cref="DocValuesProducer"/> for the given generation. </summary>
-        internal DocValuesProducer GetDocValuesProducer(long? gen, SegmentCommitInfo si, IOContext context, Directory dir, DocValuesFormat dvFormat, IList<FieldInfo> infos, int termsIndexDivisor)
-        {
-            lock (this)
-            {
-                RefCount<DocValuesProducer> dvp;
-                if (!(genDVProducers.TryGetValue(gen, out dvp)))
-                {
-                    dvp = NewDocValuesProducer(si, context, dir, dvFormat, gen, infos, termsIndexDivisor);
-                    Debug.Assert(dvp != null);
-                    genDVProducers[gen] = dvp;
-                }
-                else
-                {
-                    dvp.IncRef();
-                }
-                return dvp.Get();
-            }
-        }
-
-        /// <summary>
-        /// Decrement the reference count of the given <seealso cref="DocValuesProducer"/>
-        /// generations.
-        /// </summary>
-        internal void DecRef(IList<long?> dvProducersGens)
-        {
-            lock (this)
-            {
-                Exception t = null;
-                foreach (long? gen in dvProducersGens)
-                {
-                    RefCount<DocValuesProducer> dvp = genDVProducers[gen];
-                    Debug.Assert(dvp != null, "gen=" + gen);
-                    try
-                    {
-                        dvp.DecRef();
-                    }
-                    catch (Exception th)
-                    {
-                        if (t != null)
-                        {
-                            t = th;
-                        }
-                    }
-                }
-                if (t != null)
-                {
-                    IOUtils.ReThrow(t);
-                }
-            }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/a5dc68d0/src/Lucene.Net.Core/Index/SegmentInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Index/SegmentInfo.cs b/src/Lucene.Net.Core/Index/SegmentInfo.cs
deleted file mode 100644
index 2c342b6..0000000
--- a/src/Lucene.Net.Core/Index/SegmentInfo.cs
+++ /dev/null
@@ -1,386 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Text;
-using System.Text.RegularExpressions;
-
-namespace Lucene.Net.Index
-{
-    /*
-     * Licensed to the Apache Software Foundation (ASF) under one or more
-     * contributor license agreements.  See the NOTICE file distributed with
-     * this work for additional information regarding copyright ownership.
-     * The ASF licenses this file to You under the Apache License, Version 2.0
-     * (the "License"); you may not use this file except in compliance with
-     * the License.  You may obtain a copy of the License at
-     *
-     *     http://www.apache.org/licenses/LICENSE-2.0
-     *
-     * Unless required by applicable law or agreed to in writing, software
-     * distributed under the License is distributed on an "AS IS" BASIS,
-     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     * See the License for the specific language governing permissions and
-     * limitations under the License.
-     */
-
-    using Codec = Lucene.Net.Codecs.Codec;
-    using Directory = Lucene.Net.Store.Directory;
-    using Lucene3xSegmentInfoFormat = Lucene.Net.Codecs.Lucene3x.Lucene3xSegmentInfoFormat;
-    using TrackingDirectoryWrapper = Lucene.Net.Store.TrackingDirectoryWrapper;
-
-    /// <summary>
-    /// Information about a segment such as it's name, directory, and files related
-    /// to the segment.
-    ///
-    /// @lucene.experimental
-    /// </summary>
-#if FEATURE_SERIALIZABLE
-    [Serializable]
-#endif
-    public sealed class SegmentInfo
-    {
-        // TODO: remove these from this class, for now this is the representation
-        /// <summary>
-        /// Used by some member fields to mean not present (e.g.,
-        ///  norms, deletions).
-        /// </summary>
-        public static readonly int NO = -1; // e.g. no norms; no deletes;
-
-        /// <summary>
-        /// Used by some member fields to mean present (e.g.,
-        ///  norms, deletions).
-        /// </summary>
-        public static readonly int YES = 1; // e.g. have norms; have deletes;
-
-        /// <summary>
-        /// Unique segment name in the directory. </summary>
-        public string Name { get; private set; }
-
-        private int docCount; // number of docs in seg
-
-        /// <summary>
-        /// Where this segment resides. </summary>
-        public Directory Dir { get; private set; }
-
-        private bool isCompoundFile;
-
-        private Codec codec;
-
-        private IDictionary<string, string> diagnostics;
-
-        /// @deprecated not used anymore
-        [Obsolete("not used anymore")]
-        private IDictionary<string, string> attributes;
-
-        // Tracks the Lucene version this segment was created with, since 3.1. Null
-        // indicates an older than 3.0 index, and it's used to detect a too old index.
-        // The format expected is "x.y" - "2.x" for pre-3.0 indexes (or null), and
-        // specific versions afterwards ("3.0", "3.1" etc.).
-        // see Constants.LUCENE_MAIN_VERSION.
-        private string version;
-
-        public IDictionary<string, string> Diagnostics
-        {
-            set
-            {
-                this.diagnostics = value;
-            }
-            get
-            {
-                return diagnostics;
-            }
-        }
-
-        /// <summary>
-        /// Construct a new complete SegmentInfo instance from input.
-        /// <p>Note: this is public only to allow access from
-        /// the codecs package.</p>
-        /// </summary>
-        public SegmentInfo(Directory dir, string version, string name, int docCount, bool isCompoundFile, Codec codec, IDictionary<string, string> diagnostics)
-            : this(dir, version, name, docCount, isCompoundFile, codec, diagnostics, null)
-        {
-        }
-
-        /// <summary>
-        /// Construct a new complete SegmentInfo instance from input.
-        /// <p>Note: this is public only to allow access from
-        /// the codecs package.</p>
-        /// </summary>
-        public SegmentInfo(Directory dir, string version, string name, int docCount, bool isCompoundFile, Codec codec, IDictionary<string, string> diagnostics, IDictionary<string, string> attributes)
-        {
-            Debug.Assert(!(dir is TrackingDirectoryWrapper));
-            this.Dir = dir;
-            this.version = version;
-            this.Name = name;
-            this.docCount = docCount;
-            this.isCompoundFile = isCompoundFile;
-            this.codec = codec;
-            this.diagnostics = diagnostics;
-#pragma warning disable 612, 618
-            this.attributes = attributes;
-#pragma warning restore 612, 618
-        }
-
-        /// @deprecated separate norms are not supported in >= 4.0
-        [Obsolete("separate norms are not supported in >= 4.0")]
-        internal bool HasSeparateNorms
-        {
-            get { return GetAttribute(Lucene3xSegmentInfoFormat.NORMGEN_KEY) != null; }
-        }
-
-        /// <summary>
-        /// Mark whether this segment is stored as a compound file.
-        /// </summary>
-        /// <param name="isCompoundFile"> true if this is a compound file;
-        /// else, false </param>
-        public bool UseCompoundFile
-        {
-            set
-            {
-                this.isCompoundFile = value;
-            }
-            get
-            {
-                return isCompoundFile;
-            }
-        }
-
-        /// <summary>
-        /// Can only be called once. </summary>
-        public Codec Codec
-        {
-            set
-            {
-                Debug.Assert(this.codec == null);
-                if (value == null)
-                {
-                    throw new System.ArgumentException("codec must be non-null");
-                }
-                this.codec = value;
-            }
-            get
-            {
-                return codec;
-            }
-        }
-
-        /// <summary>
-        /// Returns number of documents in this segment (deletions
-        ///  are not taken into account).
-        /// </summary>
-        public int DocCount
-        {
-            get
-            {
-                if (this.docCount == -1)
-                {
-                    throw new InvalidOperationException("docCount isn't set yet");
-                }
-                return docCount;
-            }
-            internal set // NOTE: leave package private
-            {
-                if (this.docCount != -1)
-                {
-                    throw new InvalidOperationException("docCount was already set");
-                }
-                this.docCount = value;
-            }
-        }
-
-        /// <summary>
-        /// Return all files referenced by this SegmentInfo. </summary>
-        public ISet<string> GetFiles()
-        {
-            if (setFiles == null)
-            {
-                throw new InvalidOperationException("files were not computed yet");
-            }
-            return Collections.UnmodifiableSet(setFiles);
-        }
-
-        public override string ToString()
-        {
-            return ToString(Dir, 0);
-        }
-
-        /// <summary>
-        /// Used for debugging.  Format may suddenly change.
-        ///
-        ///  <p>Current format looks like
-        ///  <code>_a(3.1):c45/4</code>, which means the segment's
-        ///  name is <code>_a</code>; it was created with Lucene 3.1 (or
-        ///  '?' if it's unknown); it's using compound file
-        ///  format (would be <code>C</code> if not compound); it
-        ///  has 45 documents; it has 4 deletions (this part is
-        ///  left off when there are no deletions).</p>
-        /// </summary>
-        public string ToString(Directory dir, int delCount)
-        {
-            StringBuilder s = new StringBuilder();
-            s.Append(Name).Append('(').Append(version == null ? "?" : version).Append(')').Append(':');
-            char cfs = UseCompoundFile ? 'c' : 'C';
-            s.Append(cfs);
-
-            if (this.Dir != dir)
-            {
-                s.Append('x');
-            }
-            s.Append(docCount);
-
-            if (delCount != 0)
-            {
-                s.Append('/').Append(delCount);
-            }
-
-            // TODO: we could append toString of attributes() here?
-
-            return s.ToString();
-        }
-
-        /// <summary>
-        /// We consider another SegmentInfo instance equal if it
-        ///  has the same dir and same name.
-        /// </summary>
-        public override bool Equals(object obj)
-        {
-            if (this == obj)
-            {
-                return true;
-            }
-            if (obj is SegmentInfo)
-            {
-                SegmentInfo other = (SegmentInfo)obj;
-                return other.Dir == Dir && other.Name.Equals(Name, StringComparison.Ordinal);
-            }
-            else
-            {
-                return false;
-            }
-        }
-
-        public override int GetHashCode()
-        {
-            return Dir.GetHashCode() + Name.GetHashCode();
-        }
-
-        /// <summary>
-        /// Used by DefaultSegmentInfosReader to upgrade a 3.0 segment to record its
-        /// version is "3.0". this method can be removed when we're not required to
-        /// support 3x indexes anymore, e.g. in 5.0.
-        /// <p>
-        /// <b>NOTE:</b> this method is used for internal purposes only - you should
-        /// not modify the version of a SegmentInfo, or it may result in unexpected
-        /// exceptions thrown when you attempt to open the index.
-        ///
-        /// @lucene.internal
-        /// </summary>
-        public string Version
-        {
-            set
-            {
-                this.version = value;
-            }
-            get
-            {
-                return version;
-            }
-        }
-
-        private ISet<string> setFiles;
-
-        /// <summary>
-        /// Sets the files written for this segment. </summary>
-        public void SetFiles(ISet<string> files)
-        {
-            CheckFileNames(files);
-            setFiles = files;
-        }
-
-        /// <summary>
-        /// Add these files to the set of files written for this
-        ///  segment.
-        /// </summary>
-        public void AddFiles(ICollection<string> files)
-        {
-            CheckFileNames(files);
-            //SetFiles.AddAll(files);
-            setFiles.UnionWith(files);
-        }
-
-        /// <summary>
-        /// Add this file to the set of files written for this
-        ///  segment.
-        /// </summary>
-        public void AddFile(string file)
-        {
-            //CheckFileNames(Collections.Singleton(file));
-            CheckFileNames(new[] { file });
-            setFiles.Add(file);
-        }
-
-        private void CheckFileNames(ICollection<string> files)
-        {
-            Regex r = IndexFileNames.CODEC_FILE_PATTERN;
-            foreach (string file in files)
-            {
-                if (!r.IsMatch(file))
-                {
-                    throw new System.ArgumentException("invalid codec filename '" + file + "', must match: " + IndexFileNames.CODEC_FILE_PATTERN.ToString());
-                }
-            }
-        }
-
-        /// <summary>
-        /// Get a codec attribute value, or null if it does not exist
-        /// </summary>
-        /// @deprecated no longer supported
-        [Obsolete("no longer supported")]
-        public string GetAttribute(string key)
-        {
-            if (attributes == null)
-            {
-                return null;
-            }
-            else
-            {
-                string attribute;
-                attributes.TryGetValue(key, out attribute);
-                return attribute;
-            }
-        }
-
-        /// <summary>
-        /// Puts a codec attribute value.
-        /// <p>
-        /// this is a key-value mapping for the field that the codec can use to store
-        /// additional metadata, and will be available to the codec when reading the
-        /// segment via <seealso cref="#getAttribute(String)"/>
-        /// <p>
-        /// If a value already exists for the field, it will be replaced with the new
-        /// value.
-        /// </summary>
-        /// @deprecated no longer supported
-        [Obsolete("no longer supported")]
-        public string PutAttribute(string key, string value)
-        {
-            if (attributes == null)
-            {
-                attributes = new Dictionary<string, string>();
-            }
-            return attributes[key] = value;
-        }
-
-        /// <summary>
-        /// Returns the internal codec attributes map.
-        /// </summary>
-        /// <returns> internal codec attributes map. May be null if no mappings exist.
-        /// </returns>
-        /// @deprecated no longer supported
-        [Obsolete("no longer supported")]
-        public IDictionary<string, string> Attributes
-        {
-            get { return attributes; }
-        }
-    }
-}
\ No newline at end of file


Mime
View raw message