lucenenet-commits mailing list archives

From: d...@apache.org
Subject: svn commit: r911154 [1/2] - in /lucene/lucene.net/trunk/C#/src: Lucene.Net/ Lucene.Net/Analysis/ Lucene.Net/Index/ Lucene.Net/Search/ Lucene.Net/Search/Function/ Lucene.Net/Search/Payloads/ Lucene.Net/Store/ Lucene.Net/Util/ Test/ Test/Analysis/ Test/I...
Date: Wed, 17 Feb 2010 19:33:05 GMT
Author: digy
Date: Wed Feb 17 19:33:03 2010
New Revision: 911154

URL: http://svn.apache.org/viewvc?rev=911154&view=rev
Log:
LUCENENET-336 Shortcut for Lucene.NET 2.9.1 --> 2.9.2

Added:
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AbstractAllTermDocs.cs
Modified:
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Analysis/Tokenizer.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/AssemblyInfo.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AllTermDocs.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/BufferedDeletes.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/DocFieldProcessorPerThread.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/DocumentsWriter.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/IndexWriter.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/SegmentTermEnum.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/TermInfosReader.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net.csproj
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/BooleanQuery.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Collector.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Filter.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Function/CustomScoreQuery.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/FuzzyTermEnum.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/NumericRangeQuery.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MaxPayloadFunction.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MinPayloadFunction.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/SortField.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopDocs.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopFieldDocs.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/NIOFSDirectory.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMDirectory.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMFile.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/AttributeSource.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Constants.cs
    lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Version.cs
    lucene/lucene.net/trunk/C#/src/Test/Analysis/BaseTokenStreamTestCase.cs
    lucene/lucene.net/trunk/C#/src/Test/Analysis/TestCachingTokenFilter.cs
    lucene/lucene.net/trunk/C#/src/Test/Analysis/TestTeeSinkTokenFilter.cs
    lucene/lucene.net/trunk/C#/src/Test/AssemblyInfo.cs
    lucene/lucene.net/trunk/C#/src/Test/Index/TestDocumentWriter.cs
    lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriter.cs
    lucene/lucene.net/trunk/C#/src/Test/Index/TestIndexWriterReader.cs
    lucene/lucene.net/trunk/C#/src/Test/Index/TestPayloads.cs
    lucene/lucene.net/trunk/C#/src/Test/Index/TestTermVectorsReader.cs
    lucene/lucene.net/trunk/C#/src/Test/Index/TestTermdocPerf.cs
    lucene/lucene.net/trunk/C#/src/Test/QueryParser/TestQueryParser.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/Function/FunctionTestSetup.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestCustomScoreQuery.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestFieldScoreQuery.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/Function/TestOrdValues.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/QueryUtils.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/TestFilteredSearch.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/TestPositionIncrement.cs
    lucene/lucene.net/trunk/C#/src/Test/Search/TestTermRangeQuery.cs
    lucene/lucene.net/trunk/C#/src/Test/Store/TestRAMDirectory.cs
    lucene/lucene.net/trunk/C#/src/Test/TestSupportClass.cs
    lucene/lucene.net/trunk/C#/src/Test/Util/TestAttributeSource.cs

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Analysis/Tokenizer.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Analysis/Tokenizer.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Analysis/Tokenizer.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Analysis/Tokenizer.cs Wed Feb 17 19:33:03 2010
@@ -26,7 +26,7 @@
 	/// <p/>
 	/// This is an abstract class; subclasses must override {@link #IncrementToken()}
 	/// <p/>
-	/// NOTE: Subclasses overriding {@link #IncrementToken()} must call
+    /// NOTE: Subclasses overriding {@link #next(Token)} must call
 	/// {@link AttributeSource#ClearAttributes()} before setting attributes.
 	/// Subclasses overriding {@link #IncrementToken()} must call
 	/// {@link Token#Clear()} before setting Token attributes.

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/AssemblyInfo.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/AssemblyInfo.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/AssemblyInfo.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/AssemblyInfo.cs Wed Feb 17 19:33:03 2010
@@ -33,7 +33,7 @@
 [assembly: AssemblyDefaultAlias("Lucene.Net")]
 [assembly: AssemblyCulture("")]
 
-[assembly: AssemblyInformationalVersionAttribute("2.9.1")]
+[assembly: AssemblyInformationalVersionAttribute("2.9.2")]
 
 
 //
@@ -47,7 +47,7 @@
 // You can specify all the values or you can default the Revision and Build Numbers 
 // by using the '*' as shown below:
 
-[assembly: AssemblyVersion("2.9.1.002")]
+[assembly: AssemblyVersion("2.9.2.001")]
 
 
 //

Added: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AbstractAllTermDocs.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/AbstractAllTermDocs.cs?rev=911154&view=auto
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AbstractAllTermDocs.cs (added)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AbstractAllTermDocs.cs Wed Feb 17 19:33:03 2010
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Lucene.Net.Index
+{
+    /// <summary>
+    /// Base class for enumerating all but deleted docs.
+    /// 
+    /// <p/>NOTE: this class is meant only to be used internally
+    /// by Lucene; it's only public so it can be shared across
+    /// packages.  This means the API is freely subject to
+    /// change, and, the class could be removed entirely, in any
+    /// Lucene release.  Use directly at your own risk!
+    /// </summary>
+    public abstract class AbstractAllTermDocs : TermDocs
+    {
+        protected int maxDoc;
+        protected int doc = -1;
+
+        protected AbstractAllTermDocs(int maxDoc)
+        {
+            this.maxDoc = maxDoc;
+        }
+
+        public void Seek(Term term)
+        {
+            if (term == null)
+            {
+                doc = -1;
+            }
+            else
+            {
+                throw new NotSupportedException();
+            }
+        }
+
+        public void Seek(TermEnum termEnum)
+        {
+            throw new NotSupportedException();
+        }
+
+        public int Doc()
+        {
+            return doc;
+        }
+
+        public int Freq()
+        {
+            return 1;
+        }
+
+        public bool Next()
+        {
+            return SkipTo(doc + 1);
+        }
+
+        public int Read(int[] docs, int[] freqs)
+        {
+            int length = docs.Length;
+            int i = 0;
+            while (i < length && doc < maxDoc)
+            {
+                if (!IsDeleted(doc))
+                {
+                    docs[i] = doc;
+                    freqs[i] = 1;
+                    ++i;
+                }
+                doc++;
+            }
+            return i;
+        }
+
+        public bool SkipTo(int target)
+        {
+            doc = target;
+            while (doc < maxDoc)
+            {
+                if (!IsDeleted(doc))
+                {
+                    return true;
+                }
+                doc++;
+            }
+            return false;
+        }
+
+        public void Close()
+        {
+        }
+
+        public abstract bool IsDeleted(int doc);
+    }
+}

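For reference, a minimal usage sketch of the new base class (not part of this commit; the subclass name and the bool[] backing are illustrative). A concrete subclass only has to supply IsDeleted(int); Next/SkipTo/Read all come from AbstractAllTermDocs:

    class ArrayBackedAllTermDocs : Lucene.Net.Index.AbstractAllTermDocs
    {
        private bool[] deleted;                           // illustrative deletion flags

        public ArrayBackedAllTermDocs(bool[] deleted) : base(deleted.Length)
        {
            this.deleted = deleted;
        }

        public override bool IsDeleted(int doc)
        {
            return deleted[doc];
        }
    }

    // usage: visits docs 0 and 2, skipping the "deleted" doc 1
    ArrayBackedAllTermDocs td = new ArrayBackedAllTermDocs(new bool[] { false, true, false });
    while (td.Next())
        System.Console.WriteLine(td.Doc());
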
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AllTermDocs.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/AllTermDocs.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AllTermDocs.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/AllTermDocs.cs Wed Feb 17 19:33:03 2010
@@ -21,87 +21,22 @@
 
 namespace Lucene.Net.Index
 {
-	
-	class AllTermDocs : TermDocs
+
+    class AllTermDocs : AbstractAllTermDocs
 	{
 		protected internal BitVector deletedDocs;
-		protected internal int maxDoc;
-		protected internal int doc = - 1;
-		
-		protected internal AllTermDocs(SegmentReader parent)
+				
+		protected internal AllTermDocs(SegmentReader parent) : base(parent.MaxDoc())
 		{
 			lock (parent)
 			{
 				this.deletedDocs = parent.deletedDocs;
 			}
-			this.maxDoc = parent.MaxDoc();
-		}
-		
-		public virtual void  Seek(Term term)
-		{
-			if (term == null)
-			{
-				doc = - 1;
-			}
-			else
-			{
-				throw new System.NotSupportedException();
-			}
-		}
-		
-		public virtual void  Seek(TermEnum termEnum)
-		{
-			throw new System.NotSupportedException();
-		}
-		
-		public virtual int Doc()
-		{
-			return doc;
-		}
-		
-		public virtual int Freq()
-		{
-			return 1;
-		}
-		
-		public virtual bool Next()
-		{
-			return SkipTo(doc + 1);
-		}
-		
-		public virtual int Read(int[] docs, int[] freqs)
-		{
-			int length = docs.Length;
-			int i = 0;
-			while (i < length && doc < maxDoc)
-			{
-				if (deletedDocs == null || !deletedDocs.Get(doc))
-				{
-					docs[i] = doc;
-					freqs[i] = 1;
-					++i;
-				}
-				doc++;
-			}
-			return i;
-		}
-		
-		public virtual bool SkipTo(int target)
-		{
-			doc = target;
-			while (doc < maxDoc)
-			{
-				if (deletedDocs == null || !deletedDocs.Get(doc))
-				{
-					return true;
-				}
-				doc++;
-			}
-			return false;
-		}
-		
-		public virtual void  Close()
-		{
 		}
+
+        public override bool IsDeleted(int doc)
+        {
+            return deletedDocs != null && deletedDocs.Get(doc);
+        }
 	}
 }
\ No newline at end of file

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/BufferedDeletes.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/BufferedDeletes.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/BufferedDeletes.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/BufferedDeletes.cs Wed Feb 17 19:33:03 2010
@@ -31,15 +31,30 @@
 	class BufferedDeletes
 	{
 		internal int numTerms;
-		internal System.Collections.Hashtable terms = new System.Collections.Hashtable();
+        internal System.Collections.IDictionary terms = null;
 		internal System.Collections.Hashtable queries = new System.Collections.Hashtable();
 		internal System.Collections.ArrayList docIDs = new System.Collections.ArrayList();
 		internal long bytesUsed;
+        internal  bool doTermSort;
+
+        public BufferedDeletes(bool doTermSort)
+        {
+            this.doTermSort = doTermSort;
+            if (doTermSort)
+            {
+                terms = new System.Collections.Generic.SortedDictionary<object, object>();
+            }
+            else
+            {
+                terms = new System.Collections.Hashtable();
+            }
+        }
+                
 		
 		// Number of documents a delete term applies to.
 		internal sealed class Num
 		{
-			private int num;
+			internal int num;
 			
 			internal Num(int num)
 			{
@@ -118,12 +133,19 @@
 			lock (this)
 			{
 				
-				System.Collections.Hashtable newDeleteTerms;
+				System.Collections.IDictionary newDeleteTerms;
 				
 				// Remap delete-by-term
 				if (terms.Count > 0)
 				{
-					newDeleteTerms = new System.Collections.Hashtable();
+                    if (doTermSort)
+                    {
+                        newDeleteTerms = new System.Collections.Generic.SortedDictionary<object, object>();
+                    }
+                    else
+                    {
+                        newDeleteTerms = new System.Collections.Hashtable();
+                    }
 					System.Collections.IEnumerator iter = new System.Collections.Hashtable(terms).GetEnumerator();
 					while (iter.MoveNext())
 					{

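For context, a small sketch (not from the patch) of why deletesFlushed now gets a sorted map via doTermSort = true: a generic SortedDictionary enumerates keys in sort order, while Hashtable enumeration order is undefined, so delete-by-term iteration could not previously assume ascending terms:

    System.Collections.Generic.SortedDictionary<object, object> sorted =
        new System.Collections.Generic.SortedDictionary<object, object>();
    sorted["writer"] = 1;
    sorted["analyzer"] = 2;
    foreach (System.Collections.Generic.KeyValuePair<object, object> e in sorted)
        System.Console.WriteLine(e.Key);                  // "analyzer" then "writer": deterministic

    System.Collections.Hashtable unsorted = new System.Collections.Hashtable();
    unsorted["writer"] = 1;
    unsorted["analyzer"] = 2;
    // Hashtable enumeration order is unspecified, which is fine for deletesInRAM
    // (doTermSort = false) but not for the flushed deletes applied in term order.
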
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/DocFieldProcessorPerThread.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/DocFieldProcessorPerThread.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
Binary files - no diff available.

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/DocumentsWriter.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/DocumentsWriter.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/DocumentsWriter.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/DocumentsWriter.cs Wed Feb 17 19:33:03 2010
@@ -235,11 +235,11 @@
 		
 		// Deletes done after the last flush; these are discarded
 		// on abort
-		private BufferedDeletes deletesInRAM = new BufferedDeletes();
+		private BufferedDeletes deletesInRAM = new BufferedDeletes(false);
 		
 		// Deletes done before the last flush; these are still
 		// kept on abort
-		private BufferedDeletes deletesFlushed = new BufferedDeletes();
+		private BufferedDeletes deletesFlushed = new BufferedDeletes(true);
 		
 		// The max number of delete terms that can be buffered before
 		// they must be flushed to disk.
@@ -742,7 +742,8 @@
 					
 					if (infoStream != null)
 					{
-						long newSegmentSize = SegmentSize(flushState.segmentName);
+                        SegmentInfo si = new SegmentInfo(flushState.segmentName, flushState.numDocs, directory);
+                        long newSegmentSize = si.SizeInBytes();
                         System.String message = System.String.Format(nf, "  oldRAMSize={0:d} newFlushedSize={1:d} docs/MB={2:f} new/old={3:%}",
                             new System.Object[] { numBytesUsed, newSegmentSize, (numDocsInRAM / (newSegmentSize / 1024.0 / 1024.0)), (100.0 * newSegmentSize / numBytesUsed) });
 						Message(message);
@@ -1055,12 +1056,12 @@
 		{
 			lock (this)
 			{
-				return deletesInRAM.numTerms;
+				return deletesInRAM.numTerms; 
 			}
 		}
 		
 		// for testing
-		internal System.Collections.Hashtable GetBufferedDeleteTerms()
+		internal System.Collections.IDictionary GetBufferedDeleteTerms()
 		{
 			lock (this)
 			{
@@ -1239,6 +1240,19 @@
 				return any;
 			}
 		}
+
+        // used only by assert
+        private Term lastDeleteTerm;
+
+        // used only by assert
+        private bool CheckDeleteTerm(Term term) 
+        {
+            if (term != null) {
+                System.Diagnostics.Debug.Assert(lastDeleteTerm == null || term.CompareTo(lastDeleteTerm) > 0, "lastTerm=" + lastDeleteTerm + " vs term=" + term);
+            }
+            lastDeleteTerm = term;
+            return true;
+        }
 		
 		// Apply buffered delete terms, queries and docIDs to the
 		// provided reader
@@ -1250,8 +1264,11 @@
 				int docEnd = docIDStart + reader.MaxDoc();
 				bool any = false;
 				
+                System.Diagnostics.Debug.Assert(CheckDeleteTerm(null));
+
 				// Delete by term
-				System.Collections.IEnumerator iter = new System.Collections.Hashtable(deletesFlushed.terms).GetEnumerator();
+                //System.Collections.IEnumerator iter = new System.Collections.Hashtable(deletesFlushed.terms).GetEnumerator();
+				System.Collections.IEnumerator iter = deletesFlushed.terms.GetEnumerator();
 				TermDocs docs = reader.TermDocs();
 				try
 				{
@@ -1259,7 +1276,9 @@
 					{
 						System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iter.Current;
 						Term term = (Term) entry.Key;
-						
+						// LUCENE-2086: we should be iterating a TreeMap,
+                        // here, so terms better be in order:
+                        System.Diagnostics.Debug.Assert(CheckDeleteTerm(term));
 						docs.Seek(term);
 						int limit = ((BufferedDeletes.Num) entry.Value).GetNum();
 						while (docs.Next())
@@ -1472,22 +1491,6 @@
 		
 		internal System.Globalization.NumberFormatInfo nf = System.Globalization.CultureInfo.CurrentCulture.NumberFormat;
 		
-		// TODO FI: this is not flexible -- we can't hardwire
-		// extensions in here:
-		private long SegmentSize(System.String segmentName)
-		{
-			// Used only when infoStream != null
-			System.Diagnostics.Debug.Assert(infoStream != null);
-			
-			long size = directory.FileLength(segmentName + ".tii") + directory.FileLength(segmentName + ".tis") + directory.FileLength(segmentName + ".frq") + directory.FileLength(segmentName + ".prx");
-			
-			System.String normFileName = segmentName + ".nrm";
-			if (directory.FileExists(normFileName))
-				size += directory.FileLength(normFileName);
-			
-			return size;
-		}
-		
 		// Coarse estimates used to measure RAM usage of buffered deletes
 		internal const int OBJECT_HEADER_BYTES = 8;
 		internal static readonly int POINTER_NUM_BYTE;

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/IndexWriter.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/IndexWriter.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/IndexWriter.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/IndexWriter.cs Wed Feb 17 19:33:03 2010
@@ -365,6 +365,8 @@
 		/// costly {@link #commit}.
 		/// <p/>
 		/// 
+        /// You must close the {@link IndexReader} returned by  this method once you are done using it.
+        /// 
 		/// <p/>
 		/// It's <i>near</i> real-time because there is no hard
 		/// guarantee on how quickly you can get a new reader after
@@ -385,10 +387,11 @@
 		/// deletes, etc.  This means additional resources (RAM,
 		/// file descriptors, CPU time) will be consumed.<p/>
 		/// 
-		/// <p/>For lower latency on reopening a reader, you may
-		/// want to call {@link #setMergedSegmentWarmer} to
+		/// <p/>For lower latency on reopening a reader, you should call
+        /// {@link #setMergedSegmentWarmer} to
 		/// pre-warm a newly merged segment before it's committed
-		/// to the index.<p/>
+		/// to the index. This is important for minimizing index-to-search 
+        /// delay after a large merge.
 		/// 
 		/// <p/>If an addIndexes* call is running in another thread,
 		/// then this reader will only search those segments from
@@ -441,13 +444,14 @@
 			// this method is called:
 			poolReaders = true;
 			
-			Flush(true, true, true);
+			Flush(true, true, false);
 			
 			// Prevent segmentInfos from changing while opening the
 			// reader; in theory we could do similar retry logic,
 			// just like we do when loading segments_N
 			lock (this)
 			{
+                ApplyDeletes();
 				return new ReadOnlyDirectoryReader(this, segmentInfos, termInfosIndexDivisor);
 			}
 		}
@@ -4737,10 +4741,15 @@
 			StartCommit(0, commitUserData);
 		}
 		
+        // Used only by commit, below; lock order is commitLock -> IW
+        private Object commitLock = new Object();
+
 		private void  Commit(long sizeInBytes)
 		{
-			StartCommit(sizeInBytes, null);
-			FinishCommit();
+            lock(commitLock) {
+                StartCommit(sizeInBytes, null);
+                FinishCommit();
+            }
 		}
 		
 		/// <summary> <p/>Commits all pending changes (added &amp; deleted
@@ -4792,20 +4801,31 @@
 		{
 			
 			EnsureOpen();
-			
-			if (infoStream != null)
-				Message("commit: start");
-			
-			if (autoCommit || pendingCommit == null)
-			{
-				if (infoStream != null)
-					Message("commit: now prepare");
-				PrepareCommit(commitUserData, true);
-			}
-			else if (infoStream != null)
-				Message("commit: already prepared");
-			
-			FinishCommit();
+
+            if (infoStream != null)
+            {
+                Message("commit: start");
+            }
+
+            lock (commitLock)
+            {
+                if (infoStream != null)
+                {
+                    Message("commit: enter lock");
+                }
+                if (autoCommit || pendingCommit == null)
+                {
+                    if (infoStream != null)
+                        Message("commit: now prepare");
+                    PrepareCommit(commitUserData, true);
+                }
+                else if (infoStream != null)
+                {
+                    Message("commit: already prepared");
+                }
+
+                FinishCommit();
+            }
 		}
 		
 		private void  FinishCommit()
@@ -5045,7 +5065,6 @@
 					
 					if (flushDeletes)
 					{
-						flushDeletesCount++;
 						ApplyDeletes();
 					}
 					
@@ -6048,6 +6067,7 @@
 			lock (this)
 			{
 				System.Diagnostics.Debug.Assert(TestPoint("startApplyDeletes"));
+                flushDeletesCount++;
 				SegmentInfos rollback = (SegmentInfos) segmentInfos.Clone();
 				bool success = false;
 				bool changed;
@@ -6290,6 +6310,9 @@
 		{
 			
 			System.Diagnostics.Debug.Assert(TestPoint("startStartCommit"));
+
+            // TODO: as of LUCENE-2095, we can simplify this method,
+            // since only 1 thread can be in here at once
 			
 			if (hitOOM)
 			{

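A hedged sketch of the lock-order rule in the new comment above ("lock order is commitLock -> IW"): a committing thread takes commitLock first and only then the writer's own monitor. The wrapper class and method name below are illustrative:

    class CommitLockOrderSketch
    {
        private readonly object commitLock = new object();

        public void CommitPendingChanges()
        {
            lock (commitLock)                 // 1. serialize committers
            {
                lock (this)                   // 2. then take the writer's monitor (the "IW" lock)
                {
                    // prepare and finish the commit while both locks are held
                }
            }
            // Taking the locks in the opposite order anywhere else would risk deadlock,
            // which is why both Commit(long) and Commit(commitUserData) enter commitLock first.
        }
    }
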
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/SegmentTermEnum.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/SegmentTermEnum.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/SegmentTermEnum.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/SegmentTermEnum.cs Wed Feb 17 19:33:03 2010
@@ -125,7 +125,7 @@
 			return clone;
 		}
 		
-		internal void  Seek(long pointer, int p, Term t, TermInfo ti)
+		internal void  Seek(long pointer, long p, Term t, TermInfo ti)
 		{
 			input.Seek(pointer);
 			position = p;

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/TermInfosReader.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Index/TermInfosReader.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/TermInfosReader.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Index/TermInfosReader.cs Wed Feb 17 19:33:03 2010
@@ -189,7 +189,7 @@
 		
 		private void  SeekEnum(SegmentTermEnum enumerator, int indexOffset)
 		{
-			enumerator.Seek(indexPointers[indexOffset], (indexOffset * totalIndexInterval) - 1, indexTerms[indexOffset], indexInfos[indexOffset]);
+			enumerator.Seek(indexPointers[indexOffset], ((long)indexOffset * totalIndexInterval) - 1, indexTerms[indexOffset], indexInfos[indexOffset]);
 		}
 		
 		/// <summary>Returns the TermInfo for a Term in the set, or null. </summary>
@@ -270,30 +270,7 @@
 			}
 			return ti;
 		}
-		
-		/// <summary>Returns the nth term in the set. </summary>
-		internal Term Get(int position)
-		{
-			if (size == 0)
-				return null;
-			
-			SegmentTermEnum enumerator = GetThreadResources().termEnum;
-			if (enumerator.Term() != null && position >= enumerator.position && position < (enumerator.position + totalIndexInterval))
-				return ScanEnum(enumerator, position); // can avoid seek
-			
-			SeekEnum(enumerator, position / totalIndexInterval); // must seek
-			return ScanEnum(enumerator, position);
-		}
-		
-		private Term ScanEnum(SegmentTermEnum enumerator, int position)
-		{
-			while (enumerator.position < position)
-				if (!enumerator.Next())
-					return null;
-			
-			return enumerator.Term();
-		}
-		
+						
 		private void  EnsureIndexIsRead()
 		{
 			if (indexTerms == null)

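The (long) cast above is an overflow fix: indexOffset * totalIndexInterval is otherwise computed in 32-bit arithmetic and can wrap for very large indexes, which is also why SegmentTermEnum.Seek now takes a long position. A small illustration with made-up values:

    int indexOffset = 20000000;                            // illustrative values only
    int totalIndexInterval = 128;
    long wrong = indexOffset * totalIndexInterval;         // int*int wraps, then widens: -1734967296
    long right = (long)indexOffset * totalIndexInterval;   // widened first: 2560000000
    System.Console.WriteLine(wrong + " vs " + right);
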
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net.csproj
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Lucene.Net.csproj?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net.csproj (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Lucene.Net.csproj Wed Feb 17 19:33:03 2010
@@ -232,6 +232,7 @@
     <Compile Include="Document\SetBasedFieldSelector.cs">
       <SubType>Code</SubType>
     </Compile>
+    <Compile Include="Index\AbstractAllTermDocs.cs" />
     <Compile Include="Index\AllTermDocs.cs" />
     <Compile Include="Index\BufferedDeletes.cs" />
     <Compile Include="Index\ByteBlockPool.cs" />

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/BooleanQuery.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/BooleanQuery.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/BooleanQuery.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/BooleanQuery.cs Wed Feb 17 19:33:03 2010
@@ -671,13 +671,14 @@
             BooleanQuery other = (BooleanQuery)o;
             return (this.GetBoost() == other.GetBoost())
                     && this.clauses.Equals(other.clauses)
-                    && this.GetMinimumNumberShouldMatch() == other.GetMinimumNumberShouldMatch();
+                    && this.GetMinimumNumberShouldMatch() == other.GetMinimumNumberShouldMatch()
+                    && this.disableCoord == other.disableCoord;
 		}
 		
 		/// <summary>Returns a hash code value for this object.</summary>
 		public override int GetHashCode()
 		{
-			return BitConverter.ToInt32(BitConverter.GetBytes(GetBoost()), 0) ^ clauses.GetHashCode() + GetMinimumNumberShouldMatch();
+            return BitConverter.ToInt32(BitConverter.GetBytes(GetBoost()), 0) ^ clauses.GetHashCode() + GetMinimumNumberShouldMatch() + (disableCoord ? 17 : 0);
 		}
 	}
 }
\ No newline at end of file

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Collector.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/Collector.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Collector.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Collector.cs Wed Feb 17 19:33:03 2010
@@ -164,15 +164,24 @@
 		/// </param>
 		public abstract void  SetNextReader(IndexReader reader, int docBase);
 		
-		/// <summary> Returns true iff this {@link Collector} can accept documents given to
-		/// {@link #Collect(int)} out of order.
-		/// <p/>
-		/// NOTE: some collectors can work in either mode, with a more efficient
-		/// implementation for in-order docs collection. If your collector can work in
-		/// either mode, it is recommended that you create two variants of it, since
-		/// some queries work much faster if out-of-order collection is supported by a
-		/// {@link Collector}.
+		/// <summary>
+		/// Return <code>true</code> if this collector does not
+		/// require the matching docIDs to be delivered in int sort
+        /// order (smallest to largest) to {@link #collect}.
+        ///
+        /// <p/> Most Lucene Query implementations will visit
+        /// matching docIDs in order.  However, some queries
+        /// (currently limited to certain cases of {@link
+        /// BooleanQuery}) can achieve faster searching if the
+        /// <code>Collector</code> allows them to deliver the
+        /// docIDs out of order.
+        ///
+        /// <p/> Many collectors don't mind getting docIDs out of
+        /// order, so it's important to return <code>true</code>
+        /// here.
 		/// </summary>
+		/// <returns><code>true</code> if this collector accepts docIDs out of order</returns>
 		public abstract bool AcceptsDocsOutOfOrder();
 	}
 }
\ No newline at end of file

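A minimal collector sketch (not part of this commit) matching the documentation above: it only counts hits, so docID order does not matter and AcceptsDocsOutOfOrder can return true. The class name is illustrative and the other members are assumed to keep their usual Collector signatures:

    class CountingCollector : Lucene.Net.Search.Collector
    {
        public int Count = 0;

        public override void SetScorer(Lucene.Net.Search.Scorer scorer)
        {
            // scores are not needed just to count hits
        }

        public override void Collect(int doc)
        {
            Count++;                          // order of 'doc' values is irrelevant here
        }

        public override void SetNextReader(Lucene.Net.Index.IndexReader reader, int docBase)
        {
            // no per-segment state to track
        }

        public override bool AcceptsDocsOutOfOrder()
        {
            return true;                      // counting does not depend on docID order
        }
    }
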
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Filter.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/Filter.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Filter.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Filter.cs Wed Feb 17 19:33:03 2010
@@ -23,8 +23,7 @@
 namespace Lucene.Net.Search
 {
 	
-	/// <summary>Abstract base class providing a mechanism to use a subset of an index
-	/// for restriction or permission of index search results.
+	/// <summary>Abstract base class for restricting which documents may be returned during searching.
 	/// <p/>
 	/// <b>Note:</b> In Lucene 3.0 {@link #Bits(IndexReader)} will be removed
 	/// and {@link #GetDocIdSet(IndexReader)} will be defined as abstract.
@@ -33,7 +32,13 @@
 	/// </summary>
 	[Serializable]
 	public abstract class Filter
-	{
+	{ 
+        
+
+        /// <summary><b>NOTE:</b> See {@link #getDocIdSet(IndexReader)} for
+        /// handling of multi-segment indexes (which applies to
+        /// this method as well).
+        /// </summary>
 		/// <returns> A BitSet with true for documents which should be permitted in
 		/// search results, and false for those that should not.
 		/// </returns>
@@ -45,10 +50,25 @@
 			throw new System.NotSupportedException();
 		}
 		
+        ///<summary>
+        ///  Creates a {@link DocIdSet} enumerating the documents that should be
+        ///  permitted in search results. <b>NOTE:</b> null can be
+        ///  returned if no documents are accepted by this Filter.
+        ///  <p/>
+        ///  Note: This method will be called once per segment in
+        ///  the index during searching.  The returned {@link DocIdSet}
+        ///  must refer to document IDs for that segment, not for
+        ///  the top-level reader.
+        ///  </summary>
+        ///  <param name="reader">a {@link IndexReader} instance opened on the index currently
+        ///           searched on. Note, it is likely that the provided reader does not
+        ///           represent the whole underlying index i.e. if the index has more than
+        ///           one segment the given reader only represents a single segment.
+        ///  </param>
 		/// <returns> a DocIdSet that provides the documents which should be permitted or
 		/// prohibited in search results. <b>NOTE:</b> null can be returned if
 		/// no documents will be accepted by this Filter.
-		/// 
 		/// </returns>
 		/// <seealso cref="DocIdBitSet">
 		/// </seealso>

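A hedged sketch of a Filter honouring the per-segment contract described above: the DocIdSet is built from, and numbered by, the segment reader it is given. The even-docID rule is purely illustrative, and the DocIdBitSet(BitArray) helper in Lucene.Net.Util is assumed:

    [System.Serializable]
    class EvenDocsFilter : Lucene.Net.Search.Filter
    {
        public override Lucene.Net.Search.DocIdSet GetDocIdSet(Lucene.Net.Index.IndexReader reader)
        {
            // 'reader' is a single segment, so these docIDs are segment-relative
            System.Collections.BitArray bits = new System.Collections.BitArray(reader.MaxDoc());
            for (int doc = 0; doc < reader.MaxDoc(); doc++)
            {
                if (doc % 2 == 0 && !reader.IsDeleted(doc))
                    bits.Set(doc, true);
            }
            return new Lucene.Net.Util.DocIdBitSet(bits);
        }
    }
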
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Function/CustomScoreQuery.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/Function/CustomScoreQuery.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Function/CustomScoreQuery.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Function/CustomScoreQuery.cs Wed Feb 17 19:33:03 2010
@@ -185,8 +185,8 @@
 		/// The default computation herein is a multiplication of given scores:
 		/// <pre>
 		/// ModifiedScore = valSrcScore * valSrcScores[0] * valSrcScores[1] * ...
-		/// </pre>
-		/// 
+		/// </pre>
+        /// NOTE: the doc is relative to the current reader, last passed to <see cref="SetNextReader"/>
 		/// </summary>
 		/// <param name="doc">id of scored doc. 
 		/// </param>
@@ -227,7 +227,7 @@
 		/// <pre>
 		/// ModifiedScore = subQueryScore * valSrcScore
 		/// </pre>
-		/// 
+		/// NOTE: the doc is relative to the current reader, last passed to <see cref="SetNextReader"/>
 		/// </summary>
 		/// <param name="doc">id of scored doc. 
 		/// </param>
@@ -241,6 +241,15 @@
 		{
 			return subQueryScore * valSrcScore;
 		}
+
+        /// <summary>
+        /// Called when the scoring switches to another reader.
+        /// </summary>
+        /// <param name="reader">The next IndexReader</param>
+        public virtual void SetNextReader(IndexReader reader)
+        {
+
+        }
 		
 		/// <summary> Explain the custom score.
 		/// Whenever overriding {@link #CustomScore(int, float, float[])}, 
@@ -486,6 +495,7 @@
 				this.valSrcScorers = valSrcScorers;
 				this.reader = reader;
 				this.vScores = new float[valSrcScorers.Length];
+                this.Enclosing_Instance.SetNextReader(reader);
 			}
 			
 			/// <deprecated> use {@link #NextDoc()} instead. 

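A hedged sketch (not part of this commit) of a subclass using the new SetNextReader hook; the class name and cached field are illustrative, and the (doc, subQueryScore, valSrcScore) CustomScore overload is assumed to stay overridable:

    class ReaderAwareScoreQuery : Lucene.Net.Search.Function.CustomScoreQuery
    {
        private Lucene.Net.Index.IndexReader currentReader;

        public ReaderAwareScoreQuery(Lucene.Net.Search.Query subQuery) : base(subQuery)
        {
        }

        public override void SetNextReader(Lucene.Net.Index.IndexReader reader)
        {
            currentReader = reader;    // docs passed to CustomScore are relative to this reader
        }

        public override float CustomScore(int doc, float subQueryScore, float valSrcScore)
        {
            // per-reader state (currentReader) could be consulted here;
            // the default multiplication is kept for the sketch
            return subQueryScore * valSrcScore;
        }
    }
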
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/FuzzyTermEnum.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/FuzzyTermEnum.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/FuzzyTermEnum.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/FuzzyTermEnum.cs Wed Feb 17 19:33:03 2010
@@ -222,88 +222,87 @@
 		/// <returns> the similarity,  0.0 or less indicates that it matches less than the required
 		/// threshold and 1.0 indicates that the text and target are identical
 		/// </returns>
-		private float Similarity(System.String target)
-		{
-			lock (this)
-			{
-				int m = target.Length;
-				int n = text.Length;
-				if (n == 0)
-				{
-					//we don't have anything to compare.  That means if we just add
-					//the letters for m we get the new word
-					return prefix.Length == 0?0.0f:1.0f - ((float) m / prefix.Length);
-				}
-				if (m == 0)
-				{
-					return prefix.Length == 0?0.0f:1.0f - ((float) n / prefix.Length);
-				}
-				
-				int maxDistance = GetMaxDistance(m);
-				
-				if (maxDistance < System.Math.Abs(m - n))
-				{
-					//just adding the characters of m to n or vice-versa results in
-					//too many edits
-					//for example "pre" length is 3 and "prefixes" length is 8.  We can see that
-					//given this optimal circumstance, the edit distance cannot be less than 5.
-					//which is 8-3 or more precisesly Math.abs(3-8).
-					//if our maximum edit distance is 4, then we can discard this word
-					//without looking at it.
-					return 0.0f;
-				}
-				
-				//let's make sure we have enough room in our array to do the distance calculations.
-				if (d[0].Length <= m)
-				{
-					GrowDistanceArray(m);
-				}
-				
-				// init matrix d
-				for (int i = 0; i <= n; i++)
-					d[i][0] = i;
-				for (int j = 0; j <= m; j++)
-					d[0][j] = j;
-				
-				// start computing edit distance
-				for (int i = 1; i <= n; i++)
-				{
-					int bestPossibleEditDistance = m;
-					char s_i = text[i - 1];
-					for (int j = 1; j <= m; j++)
-					{
-						if (s_i != target[j - 1])
-						{
-							d[i][j] = Min(d[i - 1][j], d[i][j - 1], d[i - 1][j - 1]) + 1;
-						}
-						else
-						{
-							d[i][j] = Min(d[i - 1][j] + 1, d[i][j - 1] + 1, d[i - 1][j - 1]);
-						}
-						bestPossibleEditDistance = System.Math.Min(bestPossibleEditDistance, d[i][j]);
-					}
-					
-					//After calculating row i, the best possible edit distance
-					//can be found by found by finding the smallest value in a given column.
-					//If the bestPossibleEditDistance is greater than the max distance, abort.
-					
-					if (i > maxDistance && bestPossibleEditDistance > maxDistance)
-					{
-						//equal is okay, but not greater
-						//the closest the target can be to the text is just too far away.
-						//this target is leaving the party early.
-						return 0.0f;
-					}
-				}
-				
-				// this will return less than 0.0 when the edit distance is
-				// greater than the number of characters in the shorter word.
-				// but this was the formula that was previously used in FuzzyTermEnum,
-				// so it has not been changed (even though minimumSimilarity must be
-				// greater than 0.0)
-				return 1.0f - ((float) d[n][m] / (float) (prefix.Length + System.Math.Min(n, m)));
-			}
-		}
+        private float Similarity(System.String target)
+        {
+
+            int m = target.Length;
+            int n = text.Length;
+            if (n == 0)
+            {
+                //we don't have anything to compare.  That means if we just add
+                //the letters for m we get the new word
+                return prefix.Length == 0 ? 0.0f : 1.0f - ((float)m / prefix.Length);
+            }
+            if (m == 0)
+            {
+                return prefix.Length == 0 ? 0.0f : 1.0f - ((float)n / prefix.Length);
+            }
+
+            int maxDistance = GetMaxDistance(m);
+
+            if (maxDistance < System.Math.Abs(m - n))
+            {
+                //just adding the characters of m to n or vice-versa results in
+                //too many edits
+                //for example "pre" length is 3 and "prefixes" length is 8.  We can see that
+                //given this optimal circumstance, the edit distance cannot be less than 5.
+                //which is 8-3 or more precisesly Math.abs(3-8).
+                //if our maximum edit distance is 4, then we can discard this word
+                //without looking at it.
+                return 0.0f;
+            }
+
+            //let's make sure we have enough room in our array to do the distance calculations.
+            if (d[0].Length <= m)
+            {
+                GrowDistanceArray(m);
+            }
+
+            // init matrix d
+            for (int i = 0; i <= n; i++)
+                d[i][0] = i;
+            for (int j = 0; j <= m; j++)
+                d[0][j] = j;
+
+            // start computing edit distance
+            for (int i = 1; i <= n; i++)
+            {
+                int bestPossibleEditDistance = m;
+                char s_i = text[i - 1];
+                for (int j = 1; j <= m; j++)
+                {
+                    if (s_i != target[j - 1])
+                    {
+                        d[i][j] = Min(d[i - 1][j], d[i][j - 1], d[i - 1][j - 1]) + 1;
+                    }
+                    else
+                    {
+                        d[i][j] = Min(d[i - 1][j] + 1, d[i][j - 1] + 1, d[i - 1][j - 1]);
+                    }
+                    bestPossibleEditDistance = System.Math.Min(bestPossibleEditDistance, d[i][j]);
+                }
+
+                //After calculating row i, the best possible edit distance
+                //can be found by found by finding the smallest value in a given column.
+                //If the bestPossibleEditDistance is greater than the max distance, abort.
+
+                if (i > maxDistance && bestPossibleEditDistance > maxDistance)
+                {
+                    //equal is okay, but not greater
+                    //the closest the target can be to the text is just too far away.
+                    //this target is leaving the party early.
+                    return 0.0f;
+                }
+            }
+
+            // this will return less than 0.0 when the edit distance is
+            // greater than the number of characters in the shorter word.
+            // but this was the formula that was previously used in FuzzyTermEnum,
+            // so it has not been changed (even though minimumSimilarity must be
+            // greater than 0.0)
+            return 1.0f - ((float)d[n][m] / (float)(prefix.Length + System.Math.Min(n, m)));
+
+        }
 		
 		/// <summary> Grow the second dimension of the array, so that we can calculate the
 		/// Levenshtein difference.

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/NumericRangeQuery.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/NumericRangeQuery.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/NumericRangeQuery.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/NumericRangeQuery.cs Wed Feb 17 19:33:03 2010
@@ -152,7 +152,7 @@
 	/// 
 	/// </since>
 	[Serializable]
-	public sealed class NumericRangeQuery:MultiTermQuery
+	public sealed class NumericRangeQuery:MultiTermQuery,System.Runtime.Serialization.ISerializable
 	{
 		
 		private NumericRangeQuery(System.String field, int precisionStep, int valSize, System.ValueType min, System.ValueType max, bool minInclusive, bool maxInclusive)
@@ -355,6 +355,48 @@
 				hash += (max.GetHashCode() ^ 0x733fa5fe);
 			return hash + (minInclusive.GetHashCode() ^ 0x14fa55fb) + (maxInclusive.GetHashCode() ^ 0x733fa5fe);
 		}
+
+         // field must be interned after reading from stream
+        //private void ReadObject(java.io.ObjectInputStream in) 
+        //{
+        //    in.defaultReadObject();
+        //    field = StringHelper.intern(field);
+        //}
+
+
+        /// <summary>
+        /// Lucene.Net specific. Needed for Serialization
+        /// </summary>
+        /// <param name="info"></param>
+        /// <param name="context"></param>
+        public void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
+        {
+            info.AddValue("precisionStep", precisionStep);
+            info.AddValue("valSize", valSize);
+            info.AddValue("min", min);
+            info.AddValue("max", max);
+            info.AddValue("minInclusive", minInclusive);
+            info.AddValue("maxInclusive", maxInclusive);
+
+            info.AddValue("field", field);
+        }
+
+        /// <summary>
+        /// Lucene.Net specific. Needed for deserialization
+        /// </summary>
+        /// <param name="info"></param>
+        /// <param name="context"></param>
+        protected NumericRangeQuery(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
+        {
+            precisionStep   = (int)info.GetValue("precisionStep", typeof(int));
+            valSize         = (int)info.GetValue("valSize", typeof(int));
+            min             = (System.ValueType)info.GetValue("min", typeof(System.ValueType));
+            max             = (System.ValueType)info.GetValue("max", typeof(System.ValueType));
+            minInclusive    = (bool)info.GetValue("minInclusive", typeof(bool));
+            maxInclusive    = (bool)info.GetValue("maxInclusive", typeof(bool));
+            
+            field           = StringHelper.Intern((string)info.GetValue("field", typeof(string)));
+        }
 		
 		// members (package private, to be also fast accessible by NumericRangeTermEnum)
 		internal System.String field;

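A usage sketch of the Lucene.Net-specific serialization support added above (not part of the patch). It assumes the usual NewIntRange factory; the deserialization constructor shown in the diff re-interns the field name, so the round-tripped query should compare equal:

    Lucene.Net.Search.NumericRangeQuery q =
        Lucene.Net.Search.NumericRangeQuery.NewIntRange("price", 4, 10, 20, true, true);
    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter =
        new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
    using (System.IO.MemoryStream stream = new System.IO.MemoryStream())
    {
        formatter.Serialize(stream, q);
        stream.Position = 0;
        Lucene.Net.Search.NumericRangeQuery copy =
            (Lucene.Net.Search.NumericRangeQuery)formatter.Deserialize(stream);
        System.Console.WriteLine(q.Equals(copy));      // expected: True
    }
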
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MaxPayloadFunction.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/Payloads/MaxPayloadFunction.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MaxPayloadFunction.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MaxPayloadFunction.cs Wed Feb 17 19:33:03 2010
@@ -32,7 +32,14 @@
 	{
 		public override float CurrentScore(int docId, System.String field, int start, int end, int numPayloadsSeen, float currentScore, float currentPayloadScore)
 		{
-			return System.Math.Max(currentPayloadScore, currentScore);
+            if (numPayloadsSeen == 0)
+            {
+                return currentPayloadScore;
+            }
+            else
+            {
+                return System.Math.Max(currentPayloadScore, currentScore);
+            }
 		}
 		
 		public override float DocScore(int docId, System.String field, int numPayloadsSeen, float payloadScore)

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MinPayloadFunction.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/Payloads/MinPayloadFunction.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MinPayloadFunction.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/Payloads/MinPayloadFunction.cs Wed Feb 17 19:33:03 2010
@@ -30,7 +30,14 @@
 		
 		public override float CurrentScore(int docId, System.String field, int start, int end, int numPayloadsSeen, float currentScore, float currentPayloadScore)
 		{
-			return System.Math.Min(currentPayloadScore, currentScore);
+            if (numPayloadsSeen == 0)
+            {
+                return currentPayloadScore;
+            }
+            else
+            {
+                return System.Math.Min(currentPayloadScore, currentScore);
+            }
 		}
 		
 		public override float DocScore(int docId, System.String field, int numPayloadsSeen, float payloadScore)

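A short worked example (not from the patch) of the fix to both payload functions: with numPayloadsSeen == 0 the first payload score is now taken as-is instead of being compared against an arbitrary initial currentScore (the 1.0f below is only illustrative):

    Lucene.Net.Search.Payloads.MaxPayloadFunction max =
        new Lucene.Net.Search.Payloads.MaxPayloadFunction();

    // first payload seen for the term: numPayloadsSeen == 0, so 0.25f is returned directly
    float first = max.CurrentScore(0, "body", 0, 4, 0, 1.0f, 0.25f);    // 0.25f

    // second payload: the running maximum is now meaningful
    float second = max.CurrentScore(0, "body", 5, 9, 1, first, 0.75f);  // 0.75f

    System.Console.WriteLine(first + " then " + second);
    // before this change the first call returned Math.Max(0.25f, 1.0f) = 1.0f,
    // letting the initial currentScore leak into the maximum.
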
Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/SortField.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/SortField.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/SortField.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/SortField.cs Wed Feb 17 19:33:03 2010
@@ -39,7 +39,7 @@
 	/// <seealso cref="Sort">
 	/// </seealso>
 	[Serializable]
-	public class SortField
+	public class SortField: System.Runtime.Serialization.ISerializable
 	{
 		
 		/// <summary>Sort by document score (relevancy).  Sort values are Float and higher
@@ -553,6 +553,53 @@
 			return hash;
 		}
 		
+        
+       //// field must be interned after reading from stream
+       // private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+       //  in.defaultReadObject();
+       //  if (field != null)
+       //    field = StringHelper.intern(field);
+       // }
+
+        /// <summary>
+        /// Lucene.Net specific. Needed for Serialization
+        /// </summary>
+        /// <param name="info"></param>
+        /// <param name="context"></param>
+        public void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
+        {
+            //Should we add other fields as in NumericRangeQuery? {{DIGY}}
+
+            info.AddValue("type", type);
+            info.AddValue("useLegacy", useLegacy);
+            info.AddValue("reverse", reverse);
+            info.AddValue("locale", locale);
+            info.AddValue("comparatorSource", comparatorSource);
+            info.AddValue("factory", factory);
+            info.AddValue("parser", parser);
+                        
+            info.AddValue("field", field);
+        }
+
+        /// <summary>
+        /// Lucene.Net specific. Needed for deserialization
+        /// </summary>
+        /// <param name="info"></param>
+        /// <param name="context"></param>
+        protected SortField(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)
+        {
+            //Should we add other fields as in NumericRangeQuery? {{DIGY}}
+
+            type        = (int)info.GetValue("type", typeof(int));
+            useLegacy   = (bool)info.GetValue("useLegacy", typeof(bool));
+            reverse     = (bool)info.GetValue("reverse", typeof(bool));
+            locale      = (System.Globalization.CultureInfo)info.GetValue("locale", typeof(System.Globalization.CultureInfo));
+            comparatorSource = (FieldComparatorSource)info.GetValue("comparatorSource", typeof(FieldComparatorSource));
+            factory          = (SortComparatorSource)info.GetValue("factory", typeof(FieldComparatorSource));
+            parser           = (Parser)info.GetValue("parser", typeof(Parser));
+                        
+            field       = StringHelper.Intern((string)info.GetValue("field", typeof(string)));
+        }
 		
 		/// <summary>Returns the {@link FieldComparator} to use for
 		/// sorting.

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopDocs.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/TopDocs.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopDocs.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopDocs.cs Wed Feb 17 19:33:03 2010
@@ -20,22 +20,23 @@
 namespace Lucene.Net.Search
 {
 	
-	/// <summary>Expert: Returned by low-level search implementations.</summary>
-	/// <seealso cref="Searcher.Search(Query,Filter,int)">
-	/// </seealso>
+	/// <summary> Represents hits returned by {@link
+    /// Searcher#search(Query,Filter,int)} and {@link
+    /// Searcher#search(Query,int)}.
+    /// </summary>
 	[Serializable]
 	public class TopDocs
 	{
-		/// <summary>Expert: The total number of hits for the query.</summary>
+		/// <summary>The total number of hits for the query.</summary>
 		/// <seealso cref="Hits.Length()">
 		/// </seealso>
 		public int totalHits;
-		/// <summary>Expert: The top hits for the query. </summary>
+		/// <summary>The top hits for the query. </summary>
 		public ScoreDoc[] scoreDocs;
-		/// <summary>Expert: Stores the maximum score value encountered, needed for normalizing. </summary>
+		/// <summary>Stores the maximum score value encountered, needed for normalizing. </summary>
 		private float maxScore;
 		
-		/// <summary> Expert: Returns the maximum score value encountered. Note that in case
+		/// <summary>Returns the maximum score value encountered. Note that in case
 		/// scores are not tracked, this returns {@link Float#NaN}.
 		/// </summary>
 		public virtual float GetMaxScore()
@@ -43,18 +44,18 @@
 			return maxScore;
 		}
 		
-		/// <summary>Expert: Sets the maximum score value encountered. </summary>
+		/// <summary>Sets the maximum score value encountered. </summary>
 		public virtual void  SetMaxScore(float maxScore)
 		{
 			this.maxScore = maxScore;
 		}
 		
-		/// <summary>Expert: Constructs a TopDocs with a default maxScore=Float.NaN. </summary>
+		/// <summary>Constructs a TopDocs with a default maxScore=Float.NaN. </summary>
 		internal TopDocs(int totalHits, ScoreDoc[] scoreDocs):this(totalHits, scoreDocs, System.Single.NaN)
 		{
 		}
 		
-		/// <summary>Expert: Constructs a TopDocs.</summary>
+		/// <summary>Constructs a TopDocs.</summary>
 		public TopDocs(int totalHits, ScoreDoc[] scoreDocs, float maxScore)
 		{
 			this.totalHits = totalHits;

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopFieldDocs.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Search/TopFieldDocs.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopFieldDocs.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Search/TopFieldDocs.cs Wed Feb 17 19:33:03 2010
@@ -20,17 +20,9 @@
 namespace Lucene.Net.Search
 {
 	
-	/// <summary> Expert: Returned by low-level sorted search implementations.
-	/// 
-	/// <p/>Created: Feb 12, 2004 8:58:46 AM 
-	/// 
+	/// <summary>
+	/// Represents hits returned by {@link Searcher#search(Query,Filter,int,Sort)}.
 	/// </summary>
-	/// <since>   lucene 1.4
-	/// </since>
-	/// <version>  $Id: TopFieldDocs.java 747745 2009-02-25 11:02:57Z mikemccand $
-	/// </version>
-	/// <seealso cref="Searcher.Search(Query,Filter,int,Sort)">
-	/// </seealso>
 	[Serializable]
 	public class TopFieldDocs:TopDocs
 	{

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/NIOFSDirectory.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Store/NIOFSDirectory.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/NIOFSDirectory.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/NIOFSDirectory.cs Wed Feb 17 19:33:03 2010
@@ -16,223 +16,252 @@
  */
 
 using System;
-
 namespace Lucene.Net.Store
 {
-	
-	/// <summary> An {@link FSDirectory} implementation that uses
-	/// java.nio's FileChannel's positional read, which allows
-	/// multiple threads to read from the same file without
-	/// synchronizing.
-	/// 
-	/// <p/>This class only uses FileChannel when reading; writing
-	/// is achieved with {@link SimpleFSDirectory.SimpleFSIndexOutput}.
-	/// 
-	/// <p/><b>NOTE</b>: NIOFSDirectory is not recommended on Windows because of a bug
-	/// in how FileChannel.read is implemented in Sun's JRE.
-	/// Inside of the implementation the position is apparently
-	/// synchronized.  See <a
-	/// href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6265734">here</a>
-	/// for details.
-	/// </summary>
-	public class NIOFSDirectory:FSDirectory
-	{
-		
-		/// <summary>Create a new NIOFSDirectory for the named location.
-		/// 
-		/// </summary>
-		/// <param name="path">the path of the directory
-		/// </param>
-		/// <param name="lockFactory">the lock factory to use, or null for the default.
-		/// </param>
-		/// <throws>  IOException </throws>
-		[System.Obsolete("Use the constructor that takes a DirectoryInfo, this will be removed in the 3.0 release")]
-		public NIOFSDirectory(System.IO.FileInfo path, LockFactory lockFactory):base(new System.IO.DirectoryInfo(path.FullName), lockFactory)
-		{
-		}
+    /// <summary>
+    /// Not implemented. Waiting for volunteers.
+    /// </summary>
+    public class NIOFSDirectory : Lucene.Net.Store.FSDirectory
+    {
+        public NIOFSDirectory()
+        {
+            throw new System.NotImplementedException("Waiting for volunteers to implement this class");
 
-        /// <summary>Create a new NIOFSDirectory for the named location.
-        /// 
-        /// </summary>
-        /// <param name="path">the path of the directory
-        /// </param>
-        /// <param name="lockFactory">the lock factory to use, or null for the default.
-        /// </param>
-        /// <throws>  IOException </throws>
-        public NIOFSDirectory(System.IO.DirectoryInfo path, LockFactory lockFactory) : base(path, lockFactory)
+        }
+        public NIOFSDirectory(System.IO.DirectoryInfo dir,LockFactory lockFactory)
         {
         }
-		
-		/// <summary>Create a new NIOFSDirectory for the named location and the default lock factory.
-		/// 
-		/// </summary>
-		/// <param name="path">the path of the directory
-		/// </param>
-		/// <throws>  IOException </throws>
-		[System.Obsolete("Use the constructor that takes a DirectoryInfo, this will be removed in the 3.0 release")]
-		public NIOFSDirectory(System.IO.FileInfo path):base(new System.IO.DirectoryInfo(path.FullName), null)
-		{
-		}
 
-        /// <summary>Create a new NIOFSDirectory for the named location and the default lock factory.
-        /// 
+        /// <summary>
+        /// Not implemented. Waiting for volunteers.
         /// </summary>
-        /// <param name="path">the path of the directory
-        /// </param>
-        /// <throws>  IOException </throws>
-        public NIOFSDirectory(System.IO.DirectoryInfo path) : base(path, null)
+        public class NIOFSIndexInput
         {
+            public NIOFSIndexInput()
+            {
+                throw new System.NotImplementedException("Waiting for volunteers to implement this class");
+            }
         }
-		
-		// back compatibility so FSDirectory can instantiate via reflection
-		/// <deprecated> 
-		/// </deprecated>
-        [Obsolete]
-		internal NIOFSDirectory()
-		{
-		}
-		
-		/// <summary>Creates an IndexInput for the file with the given name. </summary>
-		public override IndexInput OpenInput(System.String name, int bufferSize)
-		{
-			EnsureOpen();
-			return new NIOFSIndexInput(new System.IO.FileInfo(System.IO.Path.Combine(GetFile().FullName, name)), bufferSize, GetReadChunkSize());
-		}
-		
-		/// <summary>Creates an IndexOutput for the file with the given name. </summary>
-		public override IndexOutput CreateOutput(System.String name)
-		{
-			InitOutput(name);
-			return new SimpleFSDirectory.SimpleFSIndexOutput(new System.IO.FileInfo(System.IO.Path.Combine(directory.FullName, name)));
-		}
-		
-		public /*protected internal*/ class NIOFSIndexInput:SimpleFSDirectory.SimpleFSIndexInput
-		{
-			
-			private System.IO.MemoryStream byteBuf; // wraps the buffer for NIO
-			
-			private byte[] otherBuffer;
-			private System.IO.MemoryStream otherByteBuf;
-			
-			internal System.IO.BinaryReader channel;
-			
-			/// <deprecated> Please use ctor taking chunkSize 
-			/// </deprecated>
-            [Obsolete("Please use ctor taking chunkSize")]
-			public NIOFSIndexInput(System.IO.FileInfo path, int bufferSize):this(path, bufferSize, FSDirectory.DEFAULT_READ_CHUNK_SIZE)
-			{
-			}
-			
-			public NIOFSIndexInput(System.IO.FileInfo path, int bufferSize, int chunkSize):base(path, bufferSize, chunkSize)
-			{
-				channel = (System.IO.BinaryReader) file;
-			}
-			
-			protected internal override void  NewBuffer(byte[] newBuffer)
-			{
-				base.NewBuffer(newBuffer);
-				// {{Aroush-2.9}} byteBuf = ByteBuffer.wrap(newBuffer);
-                System.Diagnostics.Debug.Fail("Port issue:", "byteBuf = ByteBuffer.wrap(newBuffer)"); // {{Aroush-2.9}}
-			}
-			
-			public override void  Close()
-			{
-				if (!isClone && file.isOpen)
-				{
-					// Close the channel & file
-					try
-					{
-						channel.Close();
-					}
-					finally
-					{
-						file.Close();
-					}
-				}
-			}
+    }
+}
+
+
+//namespace Lucene.Net.Store
+//{
+	
+//    /// <summary> An {@link FSDirectory} implementation that uses
+//    /// java.nio's FileChannel's positional read, which allows
+//    /// multiple threads to read from the same file without
+//    /// synchronizing.
+//    /// 
+//    /// <p/>This class only uses FileChannel when reading; writing
+//    /// is achieved with {@link SimpleFSDirectory.SimpleFSIndexOutput}.
+//    /// 
+//    /// <p/><b>NOTE</b>: NIOFSDirectory is not recommended on Windows because of a bug
+//    /// in how FileChannel.read is implemented in Sun's JRE.
+//    /// Inside of the implementation the position is apparently
+//    /// synchronized.  See <a
+//    /// href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6265734">here</a>
+//    /// for details.
+//    /// </summary>
+//    public class NIOFSDirectory:FSDirectory
+//    {
+		
+//        /// <summary>Create a new NIOFSDirectory for the named location.
+//        /// 
+//        /// </summary>
+//        /// <param name="path">the path of the directory
+//        /// </param>
+//        /// <param name="lockFactory">the lock factory to use, or null for the default.
+//        /// </param>
+//        /// <throws>  IOException </throws>
+//        [System.Obsolete("Use the constructor that takes a DirectoryInfo, this will be removed in the 3.0 release")]
+//        public NIOFSDirectory(System.IO.FileInfo path, LockFactory lockFactory):base(new System.IO.DirectoryInfo(path.FullName), lockFactory)
+//        {
+//        }
+
+//        /// <summary>Create a new NIOFSDirectory for the named location.
+//        /// 
+//        /// </summary>
+//        /// <param name="path">the path of the directory
+//        /// </param>
+//        /// <param name="lockFactory">the lock factory to use, or null for the default.
+//        /// </param>
+//        /// <throws>  IOException </throws>
+//        public NIOFSDirectory(System.IO.DirectoryInfo path, LockFactory lockFactory) : base(path, lockFactory)
+//        {
+//        }
+		
+//        /// <summary>Create a new NIOFSDirectory for the named location and the default lock factory.
+//        /// 
+//        /// </summary>
+//        /// <param name="path">the path of the directory
+//        /// </param>
+//        /// <throws>  IOException </throws>
+//        [System.Obsolete("Use the constructor that takes a DirectoryInfo, this will be removed in the 3.0 release")]
+//        public NIOFSDirectory(System.IO.FileInfo path):base(new System.IO.DirectoryInfo(path.FullName), null)
+//        {
+//        }
+
+//        /// <summary>Create a new NIOFSDirectory for the named location and the default lock factory.
+//        /// 
+//        /// </summary>
+//        /// <param name="path">the path of the directory
+//        /// </param>
+//        /// <throws>  IOException </throws>
+//        public NIOFSDirectory(System.IO.DirectoryInfo path) : base(path, null)
+//        {
+//        }
+		
+//        // back compatibility so FSDirectory can instantiate via reflection
+//        /// <deprecated> 
+//        /// </deprecated>
+//        [Obsolete]
+//        internal NIOFSDirectory()
+//        {
+//        }
+		
+//        /// <summary>Creates an IndexInput for the file with the given name. </summary>
+//        public override IndexInput OpenInput(System.String name, int bufferSize)
+//        {
+//            EnsureOpen();
+//            return new NIOFSIndexInput(new System.IO.FileInfo(System.IO.Path.Combine(GetFile().FullName, name)), bufferSize, GetReadChunkSize());
+//        }
+		
+//        /// <summary>Creates an IndexOutput for the file with the given name. </summary>
+//        public override IndexOutput CreateOutput(System.String name)
+//        {
+//            InitOutput(name);
+//            return new SimpleFSDirectory.SimpleFSIndexOutput(new System.IO.FileInfo(System.IO.Path.Combine(directory.FullName, name)));
+//        }
+		
+//        public /*protected internal*/ class NIOFSIndexInput:SimpleFSDirectory.SimpleFSIndexInput
+//        {
+			
+//            private System.IO.MemoryStream byteBuf; // wraps the buffer for NIO
+			
+//            private byte[] otherBuffer;
+//            private System.IO.MemoryStream otherByteBuf;
+			
+//            internal System.IO.BinaryReader channel;
+			
+//            /// <deprecated> Please use ctor taking chunkSize 
+//            /// </deprecated>
+//            [Obsolete("Please use ctor taking chunkSize")]
+//            public NIOFSIndexInput(System.IO.FileInfo path, int bufferSize):this(path, bufferSize, FSDirectory.DEFAULT_READ_CHUNK_SIZE)
+//            {
+//            }
+			
+//            public NIOFSIndexInput(System.IO.FileInfo path, int bufferSize, int chunkSize):base(path, bufferSize, chunkSize)
+//            {
+//                channel = (System.IO.BinaryReader) file;
+//            }
+			
+//            protected internal override void  NewBuffer(byte[] newBuffer)
+//            {
+//                base.NewBuffer(newBuffer);
+//                // {{Aroush-2.9}} byteBuf = ByteBuffer.wrap(newBuffer);
+//                System.Diagnostics.Debug.Fail("Port issue:", "byteBuf = ByteBuffer.wrap(newBuffer)"); // {{Aroush-2.9}}
+//            }
+			
+//            public override void  Close()
+//            {
+//                if (!isClone && file.isOpen)
+//                {
+//                    // Close the channel & file
+//                    try
+//                    {
+//                        channel.Close();
+//                    }
+//                    finally
+//                    {
+//                        file.Close();
+//                    }
+//                }
+//            }
 			
-			public override void  ReadInternal(byte[] b, int offset, int len)
-			{
+//            public override void  ReadInternal(byte[] b, int offset, int len)
+//            {
 				
-				System.IO.MemoryStream bb;
+//                System.IO.MemoryStream bb;
 				
-				// Determine the ByteBuffer we should use
-				if (b == buffer && 0 == offset)
-				{
-					// Use our own pre-wrapped byteBuf:
-					System.Diagnostics.Debug.Assert(byteBuf != null);
-					byteBuf.Position = 0;
-                    byteBuf.Capacity = len;
-					bb = byteBuf;
-				}
-				else
-				{
-					if (offset == 0)
-					{
-						if (otherBuffer != b)
-						{
-							// Now wrap this other buffer; with compound
-							// file, we are repeatedly called with its
-							// buffer, so we wrap it once and then re-use it
-							// on subsequent calls
-							otherBuffer = b;
-							// otherByteBuf = ByteBuffer.wrap(b); {{Aroush-2.9}}
-                            System.Diagnostics.Debug.Fail("Port issue:", "otherByteBuf = ByteBuffer.wrap(b)"); // {{Aroush-2.9}}
-						}
-						else
-							otherByteBuf.Position = 0;
-						otherByteBuf.Capacity = len;
-						bb = otherByteBuf;
-					}
-					else
-					{
-						// Always wrap when offset != 0
-                        bb = null; // bb = ByteBuffer.wrap(b, offset, len); {{Aroush-2.9}}
-                        System.Diagnostics.Debug.Fail("Port issue:", "bb = ByteBuffer.wrap(b, offset, len)"); // {{Aroush-2.9}}
-					}
-				}
+//                // Determine the ByteBuffer we should use
+//                if (b == buffer && 0 == offset)
+//                {
+//                    // Use our own pre-wrapped byteBuf:
+//                    System.Diagnostics.Debug.Assert(byteBuf != null);
+//                    byteBuf.Position = 0;
+//                    byteBuf.Capacity = len;
+//                    bb = byteBuf;
+//                }
+//                else
+//                {
+//                    if (offset == 0)
+//                    {
+//                        if (otherBuffer != b)
+//                        {
+//                            // Now wrap this other buffer; with compound
+//                            // file, we are repeatedly called with its
+//                            // buffer, so we wrap it once and then re-use it
+//                            // on subsequent calls
+//                            otherBuffer = b;
+//                            // otherByteBuf = ByteBuffer.wrap(b); {{Aroush-2.9}}
+//                            System.Diagnostics.Debug.Fail("Port issue:", "otherByteBuf = ByteBuffer.wrap(b)"); // {{Aroush-2.9}}
+//                        }
+//                        else
+//                            otherByteBuf.Position = 0;
+//                        otherByteBuf.Capacity = len;
+//                        bb = otherByteBuf;
+//                    }
+//                    else
+//                    {
+//                        // Always wrap when offset != 0
+//                        bb = null; // bb = ByteBuffer.wrap(b, offset, len); {{Aroush-2.9}}
+//                        System.Diagnostics.Debug.Fail("Port issue:", "bb = ByteBuffer.wrap(b, offset, len)"); // {{Aroush-2.9}}
+//                    }
+//                }
 				
-				int readOffset = (int) bb.Position;
-				int readLength = bb.Capacity - readOffset;
-				System.Diagnostics.Debug.Assert(readLength == len);
+//                int readOffset = (int) bb.Position;
+//                int readLength = bb.Capacity - readOffset;
+//                System.Diagnostics.Debug.Assert(readLength == len);
 				
-				long pos = GetFilePointer();
+//                long pos = GetFilePointer();
 				
-				try
-				{
-					while (readLength > 0)
-					{
-						int limit;
-						if (readLength > chunkSize)
-						{
-							// LUCENE-1566 - work around JVM Bug by breaking
-							// very large reads into chunks
-							limit = readOffset + chunkSize;
-						}
-						else
-						{
-							limit = readOffset + readLength;
-						}
-						bb.Capacity = limit;
-                        int i = -1; // int i = channel.Read(bb, pos, limit); // {{Aroush-2.9}} must read from 'channel' into 'bb'
-                        System.Diagnostics.Debug.Fail("Port issue:", "channel.Read(bb, pos, limit)"); // {{Aroush-2.9}}
-						if (i == - 1)
-						{
-							throw new System.IO.IOException("read past EOF");
-						}
-						pos += i;
-						readOffset += i;
-						readLength -= i;
-					}
-				}
-				catch (System.OutOfMemoryException e)
-				{
-					// propagate OOM up and add a hint for 32bit VM Users hitting the bug
-					// with a large chunk size in the fast path.
-					System.OutOfMemoryException outOfMemoryError = new System.OutOfMemoryException("OutOfMemoryError likely caused by the Sun VM Bug described in " + "https://issues.apache.org/jira/browse/LUCENE-1566; try calling FSDirectory.setReadChunkSize " + "with a a value smaller than the current chunk size (" + chunkSize + ")", e);
-					throw outOfMemoryError;
-				}
-			}
-		}
-	}
-}
\ No newline at end of file
+//                try
+//                {
+//                    while (readLength > 0)
+//                    {
+//                        int limit;
+//                        if (readLength > chunkSize)
+//                        {
+//                            // LUCENE-1566 - work around JVM Bug by breaking
+//                            // very large reads into chunks
+//                            limit = readOffset + chunkSize;
+//                        }
+//                        else
+//                        {
+//                            limit = readOffset + readLength;
+//                        }
+//                        bb.Capacity = limit;
+//                        int i = -1; // int i = channel.Read(bb, pos, limit); // {{Aroush-2.9}} must read from 'channel' into 'bb'
+//                        System.Diagnostics.Debug.Fail("Port issue:", "channel.Read(bb, pos, limit)"); // {{Aroush-2.9}}
+//                        if (i == - 1)
+//                        {
+//                            throw new System.IO.IOException("read past EOF");
+//                        }
+//                        pos += i;
+//                        readOffset += i;
+//                        readLength -= i;
+//                    }
+//                }
+//                catch (System.OutOfMemoryException e)
+//                {
+//                    // propagate OOM up and add a hint for 32bit VM Users hitting the bug
+//                    // with a large chunk size in the fast path.
+//                    System.OutOfMemoryException outOfMemoryError = new System.OutOfMemoryException("OutOfMemoryError likely caused by the Sun VM Bug described in " + "https://issues.apache.org/jira/browse/LUCENE-1566; try calling FSDirectory.setReadChunkSize " + "with a value smaller than the current chunk size (" + chunkSize + ")", e);
+//                    throw outOfMemoryError;
+//                }
+//            }
+//        }
+//    }
+//}
\ No newline at end of file
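
Since the ported NIOFSDirectory above is now only a stub (its parameterless constructor throws NotImplementedException and NIOFSIndexInput is likewise unimplemented), code that relied on it needs another FSDirectory until a volunteer ports the positional-read path. A minimal fallback sketch, assuming SimpleFSDirectory exposes the same DirectoryInfo/LockFactory constructor shape this patch uses elsewhere; the path below is purely illustrative:

    using Lucene.Net.Store;

    class StoreExample
    {
        static FSDirectory OpenIndex()
        {
            // "C:\\index" is a placeholder path for illustration only.
            System.IO.DirectoryInfo dir = new System.IO.DirectoryInfo("C:\\index");

            // SimpleFSDirectory is what the commented-out NIOFSDirectory code
            // already delegated its writes to, so it is the natural stand-in
            // while the FileChannel-style reader remains unported.
            return new SimpleFSDirectory(dir, null);
        }
    }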

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMDirectory.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Store/RAMDirectory.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMDirectory.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMDirectory.cs Wed Feb 17 19:33:03 2010
@@ -239,7 +239,7 @@
 				{
 					fileMap.Remove(name);
 					file.directory = null;
-					sizeInBytes -= file.sizeInBytes; // updates to RAMFile.sizeInBytes synchronized on directory
+					sizeInBytes -= file.sizeInBytes; 
 				}
 				else
 					throw new System.IO.FileNotFoundException(name);

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMFile.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Store/RAMFile.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMFile.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Store/RAMFile.cs Wed Feb 17 19:33:03 2010
@@ -29,7 +29,7 @@
 		private System.Collections.ArrayList buffers = new System.Collections.ArrayList();
 		internal long length;
 		internal RAMDirectory directory;
-		internal long sizeInBytes; // Only maintained if in a directory; updates synchronized on directory
+		internal long sizeInBytes; 
 		
 		// This is publicly modifiable via Directory.touchFile(), so direct access not supported
 		private long lastModified = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
@@ -80,21 +80,22 @@
 		
 		internal byte[] AddBuffer(int size)
 		{
-			lock (this)
-			{
-				byte[] buffer = NewBuffer(size);
-				if (directory != null)
-					lock (directory)
-					{
-						// Ensure addition of buffer and adjustment to directory size are atomic wrt directory
-						buffers.Add(buffer);
-						directory.sizeInBytes += size;
-						sizeInBytes += size;
-					}
-				else
-					buffers.Add(buffer);
-				return buffer;
-			}
+            byte[] buffer = NewBuffer(size);
+            lock (this)
+            {
+                buffers.Add(buffer);
+                sizeInBytes += size;
+            }
+
+            if (directory != null)
+            {
+                lock (directory) //{{DIGY}} what if directory gets null in the meantime?
+                {
+                    directory.sizeInBytes += size;
+                }
+            }
+
+            return buffer;
 		}
 		
 		public /*internal*/ byte[] GetBuffer(int index)
@@ -125,13 +126,16 @@
 			return new byte[size];
 		}
 		
-		// Only valid if in a directory
+		
 		public /*internal*/ virtual long GetSizeInBytes()
 		{
-			lock (directory)
-			{
-				return sizeInBytes;
-			}
+            lock (this)
+            {
+                lock (directory)
+                {
+                    return sizeInBytes;
+                }
+            }
 		}
 
         public long length_ForNUnit
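
The AddBuffer rewrite above narrows the locking: the file's own buffer list and sizeInBytes are updated under lock(this), and the directory-wide total is bumped in a separate, shorter lock(directory) block, so the two monitors are never held at the same time. A minimal sketch of the same pattern with hypothetical names, including one way to address the {{DIGY}} question by snapshotting the parent reference first:

    class Child
    {
        private readonly object sync = new object();
        internal long size;
        internal Parent parent; // may become null concurrently, as the {{DIGY}} note asks

        internal void Grow(int delta)
        {
            lock (sync)
            {
                size += delta;        // child state guarded only by its own monitor
            }

            Parent p = parent;        // snapshot tolerates a concurrent null-out
            if (p != null)
            {
                lock (p)
                {
                    p.total += delta; // parent total guarded only by the parent monitor
                }
            }
        }
    }

    class Parent
    {
        internal long total;
    }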

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/AttributeSource.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Util/AttributeSource.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/AttributeSource.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/AttributeSource.cs Wed Feb 17 19:33:03 2010
@@ -47,20 +47,8 @@
 			
 			private sealed class DefaultAttributeFactory:AttributeFactory
 			{
-				private static readonly System.Collections.Hashtable attClassImplMap = new System.Collections.Hashtable();
-
-                // {{Aroush-2.9 Port issue, need to mimic java's IdentityHashMap
-                /*
-                 * From Java docs:
-                 * This class implements the Map interface with a hash table, using 
-                 * reference-equality in place of object-equality when comparing keys 
-                 * (and values). In other words, in an IdentityHashMap, two keys k1 and k2 
-                 * are considered equal if and only if (k1==k2). (In normal Map 
-                 * implementations (like HashMap) two keys k1 and k2 are considered 
-                 * equal if and only if (k1==null ? k2==null : k1.equals(k2)).) 
-                 */
-                // Aroush-2.9}}
-				
+                private static readonly SupportClass.WeakHashTable attClassImplMap = new SupportClass.WeakHashTable();
+                
 				internal DefaultAttributeFactory()
 				{
 				}
@@ -72,12 +60,12 @@
 						return (AttributeImpl) System.Activator.CreateInstance(GetClassForInterface(attClass));
 					}
 					catch (System.UnauthorizedAccessException e)
-					{
-						throw new System.ArgumentException("Could not instantiate class " + attClass.FullName);
+                    {
+                        throw new System.ArgumentException("Could not instantiate implementing class for " + attClass.FullName);
 					}
 					catch (System.Exception e)
 					{
-						throw new System.ArgumentException("Could not instantiate class " + attClass.FullName);
+                        throw new System.ArgumentException("Could not instantiate implementing class for " + attClass.FullName);
 					}
 				}
 				
@@ -85,13 +73,14 @@
 				{
 					lock (attClassImplMap)
 					{
-						System.Type clazz = (System.Type) attClassImplMap[attClass];
+                        WeakReference refz = (WeakReference) attClassImplMap[attClass];
+                        System.Type clazz = (refz == null) ? null : ((System.Type) refz.Target);
 						if (clazz == null)
 						{
 							try
 							{
                                 string name = attClass.FullName + "Impl," + attClass.Assembly.FullName;
-								attClassImplMap.Add(attClass, clazz = System.Type.GetType(name, true));
+								attClassImplMap.Add(attClass, new WeakReference( clazz = System.Type.GetType(name, true))); //OK
 							}
 							catch (System.Exception e)
 							{
@@ -180,7 +169,7 @@
 		}
 		
 		/// <summary>a cache that stores all interfaces for known implementation classes for performance (slow reflection) </summary>
-		private static readonly System.Collections.Hashtable knownImplClasses = new System.Collections.Hashtable();
+		private static readonly SupportClass.WeakHashTable knownImplClasses = new SupportClass.WeakHashTable();
 
         // {{Aroush-2.9 Port issue, need to mimic java's IdentityHashMap
         /*
@@ -206,6 +195,8 @@
 				foundInterfaces = (System.Collections.ArrayList) knownImplClasses[clazz];
 				if (foundInterfaces == null)
 				{
+                    // we have a strong reference to the class instance holding all interfaces in the list (parameter "att"),
+                    // so all WeakReferences are never evicted by GC
 					knownImplClasses.Add(clazz, foundInterfaces = new System.Collections.ArrayList());
 					// find all interfaces that this attribute instance implements
 					// and that extend the Attribute interface
@@ -218,7 +209,7 @@
 							System.Type curInterface = interfaces[i];
 							if (curInterface != typeof(Attribute) && typeof(Attribute).IsAssignableFrom(curInterface))
 							{
-								foundInterfaces.Add(curInterface);
+								foundInterfaces.Add(new WeakReference(curInterface));
 							}
 						}
 						actClazz = actClazz.BaseType;
@@ -230,7 +221,9 @@
 			// add all interfaces of this AttributeImpl to the maps
 			for (System.Collections.IEnumerator it = foundInterfaces.GetEnumerator(); it.MoveNext(); )
 			{
-				System.Type curInterface = (System.Type) it.Current;
+                WeakReference curInterfaceRef = (WeakReference)it.Current;
+				System.Type curInterface = (System.Type) curInterfaceRef.Target;
+                System.Diagnostics.Debug.Assert(curInterface != null,"We have a strong reference on the class holding the interfaces, so they should never get evicted");
 				// Attribute is a superclass of this interface
 				if (!attributes.ContainsKey(curInterface))
 				{
@@ -255,6 +248,14 @@
 		{
 			if (!attributes.ContainsKey(attClass))
 			{
+                if (!(attClass.IsInterface &&  typeof(Attribute).IsAssignableFrom(attClass))) 
+                {
+                    throw new ArgumentException(
+                        "AddAttribute() only accepts an interface that extends Attribute, but " +
+                        attClass.FullName + " does not fulfil this contract."
+                    );
+                }
+
 				AttributeImpl attImpl = this.factory.CreateAttributeInstance(attClass);
 				AddAttributeImpl(attImpl);
 				return attImpl;
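
The attClassImplMap and knownImplClasses changes above replace plain Hashtables with SupportClass.WeakHashTable entries holding WeakReference values, so cached System.Type objects no longer keep attribute implementation types (and their assemblies) reachable forever, and AddAttribute now rejects anything that is not an interface extending Attribute. A minimal sketch of the lookup-or-populate pattern the patch follows, using hypothetical names and a plain Hashtable standing in for SupportClass.WeakHashTable:

    using System;
    using System.Collections;

    static class WeakTypeCache
    {
        // A plain Hashtable stands in for SupportClass.WeakHashTable in this sketch.
        private static readonly Hashtable cache = new Hashtable();

        public static Type GetImplType(Type interfaceType)
        {
            lock (cache)
            {
                // Unwrap the cached WeakReference; a collected target just forces a re-resolve.
                WeakReference wr = (WeakReference) cache[interfaceType];
                Type impl = (wr == null) ? null : (Type) wr.Target;
                if (impl == null)
                {
                    // Mirrors the "<interface>Impl, <assembly>" naming convention used in the patch.
                    string name = interfaceType.FullName + "Impl," + interfaceType.Assembly.FullName;
                    impl = Type.GetType(name, true);
                    cache[interfaceType] = new WeakReference(impl);
                }
                return impl;
            }
        }
    }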

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Constants.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Util/Constants.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Constants.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Constants.cs Wed Feb 17 19:33:03 2010
@@ -70,7 +70,7 @@
             return s.ToString();
         }
 
-		public static readonly System.String LUCENE_MAIN_VERSION = Ident("2.9.1");
+		public static readonly System.String LUCENE_MAIN_VERSION = Ident("2.9.2");
 		
 		public static System.String LUCENE_VERSION;
 		static Constants()

Modified: lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Version.cs
URL: http://svn.apache.org/viewvc/lucene/lucene.net/trunk/C%23/src/Lucene.Net/Util/Version.cs?rev=911154&r1=911153&r2=911154&view=diff
==============================================================================
--- lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Version.cs (original)
+++ lucene/lucene.net/trunk/C#/src/Lucene.Net/Util/Version.cs Wed Feb 17 19:33:03 2010
@@ -22,20 +22,30 @@
 	
 	/// <summary> Use by certain classes to match version compatibility
 	/// across releases of Lucene.
+    ///  <p/>
+    ///  <b>WARNING</b>: When changing the version parameter
+    ///  that you supply to components in Lucene, do not simply
+    ///  change the version at search-time, but instead also adjust
+    ///  your indexing code to match, and re-index.
 	/// </summary>
 	[Serializable]
 	public sealed class Version:Parameter
 	{
-		
-		/// <summary>Use this to get the latest &amp; greatest settings, bug
-		/// fixes, etc, for Lucene.
-		/// 
+        /// <summary>
 		/// <p/><b>WARNING</b>: if you use this setting, and then
 		/// upgrade to a newer release of Lucene, sizable changes
 		/// may happen.  If precise back compatibility is important
 		/// then you should instead explicitly specify an actual
 		/// version.
+        /// If you use this constant then you may need to
+        /// <b>re-index all of your documents</b> when upgrading
+        /// Lucene, as the way text is indexed may have changed.
+        /// Additionally, you may need to <b>re-test your entire
+        /// application</b> to ensure it behaves as expected, as
+        /// some defaults may have changed and may break functionality
+        /// in your application.
 		/// </summary>
+        [Obsolete("Use an actual version instead.")]
 		public static readonly Version LUCENE_CURRENT = new Version("LUCENE_CURRENT", 0);
 		
 		/// <summary>Match settings and bugs in Lucene's 2.0 release. </summary>
@@ -49,17 +59,14 @@
 		
 		/// <summary>Match settings and bugs in Lucene's 2.3 release. </summary>
 		public static readonly Version LUCENE_23 = new Version("LUCENE_23", 2300);
-		
-		/// <summary>Match settings and bugs in Lucene's 2.4 release.</summary>
-		/// <deprecated> This will be removed in 3.0 
-		/// </deprecated>
-        [Obsolete("This will be removed in 3.0 ")]
+
+        /// <summary>Match settings and bugs in Lucene's 2.4 release. </summary>
 		public static readonly Version LUCENE_24 = new Version("LUCENE_24", 2400);
-		
-		/// <summary>Match settings and bugs in Lucene's 2.9 release.</summary>
-		/// <deprecated> This will be removed in 3.0 
-		/// </deprecated>
-        [Obsolete("This will be removed in 3.0 ")]
+
+        /// <summary>Match settings and bugs in Lucene's 2.9 release.
+        /// Use this to get the latest &amp; greatest settings, bug
+        /// fixes, etc, for Lucene.
+        /// </summary>
 		public static readonly Version LUCENE_29 = new Version("LUCENE_29", 2900);
 		
 		private int v;
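
With LUCENE_CURRENT now marked [Obsolete], version-sensitive components should be handed an explicit constant, and the same constant should be used at index and search time. A minimal usage sketch, assuming the Lucene.Net 2.9 StandardAnalyzer overload that takes a Version (mirroring the Java 2.9 API):

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Util;

    class AnalyzerSetup
    {
        static StandardAnalyzer CreateAnalyzer()
        {
            // Pinning LUCENE_29 keeps analysis stable across library upgrades;
            // switching to a different constant later generally means re-indexing.
            return new StandardAnalyzer(Version.LUCENE_29);
        }
    }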


