lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ccurr...@apache.org
Subject [Lucene.Net] svn commit: r1294875 [13/45] - in /incubator/lucene.net/trunk: ./ build/ build/vs2010/contrib/ build/vs2010/test/ doc/ src/ src/contrib/Analyzers/ src/contrib/Analyzers/AR/ src/contrib/Analyzers/BR/ src/contrib/Analyzers/CJK/ src/contrib/Analyzers/Cn/ ...
Date Tue, 28 Feb 2012 22:43:28 GMT
Modified: incubator/lucene.net/trunk/src/core/Document/FieldSelectorResult.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Document/FieldSelectorResult.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Document/FieldSelectorResult.cs (original)
+++ incubator/lucene.net/trunk/src/core/Document/FieldSelectorResult.cs Tue Feb 28 22:43:08 2012
@@ -21,97 +21,51 @@ using System.Runtime.InteropServices;
 
 namespace Lucene.Net.Documents
 {
-	/// <summary>  Provides information about what should be done with this Field 
-	/// 
-	/// 
-	/// </summary>
-	//Replace with an enumerated type in 1.5
-	[Serializable]
-	public sealed class FieldSelectorResult
-	{
-		
-		/// <summary> Load this <see cref="Field" /> every time the <see cref="Document" /> is loaded, reading in the data as it is encountered.
-		/// <see cref="Document.GetField(String)" /> and <see cref="Document.GetFieldable(String)" /> should not return null.
-		/// <p/>
-		/// <see cref="Document.Add(Fieldable)" /> should be called by the Reader.
-		/// </summary>
-		[NonSerialized]
-		public static readonly FieldSelectorResult LOAD = new FieldSelectorResult(0);
-		/// <summary> Lazily load this <see cref="Field" />.  This means the <see cref="Field" /> is valid, but it may not actually contain its data until
-		/// invoked.  <see cref="Document.GetField(String)" /> SHOULD NOT BE USED.  <see cref="Document.GetFieldable(String)" /> is safe to use and should
-		/// return a valid instance of a <see cref="Fieldable" />.
-		/// <p/>
-		/// <see cref="Document.Add(Fieldable)" /> should be called by the Reader.
-		/// </summary>
-		[NonSerialized]
-		public static readonly FieldSelectorResult LAZY_LOAD = new FieldSelectorResult(1);
-		/// <summary> Do not load the <see cref="Field" />.  <see cref="Document.GetField(String)" /> and <see cref="Document.GetFieldable(String)" /> should return null.
-		/// <see cref="Document.Add(Fieldable)" /> is not called.
-		/// <p/>
-		/// <see cref="Document.Add(Fieldable)" /> should not be called by the Reader.
-		/// </summary>
-		[NonSerialized]
-		public static readonly FieldSelectorResult NO_LOAD = new FieldSelectorResult(2);
-		/// <summary> Load this field as in the <see cref="LOAD" /> case, but immediately return from <see cref="Field" /> loading for the <see cref="Document" />.  Thus, the
-		/// Document may not have its complete set of Fields.  <see cref="Document.GetField(String)" /> and <see cref="Document.GetFieldable(String)" /> should
-		/// both be valid for this <see cref="Field" />
-		/// <p/>
-		/// <see cref="Document.Add(Fieldable)" /> should be called by the Reader.
-		/// </summary>
-		[NonSerialized]
-		public static readonly FieldSelectorResult LOAD_AND_BREAK = new FieldSelectorResult(3);
-		/// <summary> Behaves much like <see cref="LOAD" /> but does not uncompress any compressed data.  This is used for internal purposes.
-		/// <see cref="Document.GetField(String)" /> and <see cref="Document.GetFieldable(String)" /> should not return null.
-		/// <p/>
-		/// <see cref="Document.Add(Fieldable)" /> should be called by
-		/// the Reader.
-		/// </summary>
-		/// <deprecated> This is an internal option only, and is
-		/// no longer needed now that <see cref="CompressionTools" />
-		/// is used for field compression.
-		/// </deprecated>
-        [Obsolete("This is an internal option only, and is no longer needed now that CompressionTools is used for field compression.")]
-		[NonSerialized]
-		public static readonly FieldSelectorResult LOAD_FOR_MERGE = new FieldSelectorResult(4);
-		
-		/// <summary>Expert:  Load the size of this <see cref="Field" /> rather than its value.
-		/// Size is measured as number of bytes required to store the field == bytes for a binary or any compressed value, and 2*chars for a String value.
-		/// The size is stored as a binary value, represented as an int in a byte[], with the higher order byte first in [0]
-		/// </summary>
-		[NonSerialized]
-		public static readonly FieldSelectorResult SIZE = new FieldSelectorResult(5);
-		
-		/// <summary>Expert: Like <see cref="SIZE" /> but immediately break from the field loading loop, i.e., stop loading further fields, after the size is loaded </summary>
-		[NonSerialized]
-		public static readonly FieldSelectorResult SIZE_AND_BREAK = new FieldSelectorResult(6);
-		
-		
-		
-		private int id;
-		
-		private FieldSelectorResult(int id)
-		{
-			this.id = id;
-		}
-		
-		public  override bool Equals(System.Object o)
-		{
-			if (this == o)
-				return true;
-			if (o == null || GetType() != o.GetType())
-				return false;
-			
-			FieldSelectorResult that = (FieldSelectorResult) o;
-			
-			if (id != that.id)
-				return false;
-			
-			return true;
-		}
-		
-		public override int GetHashCode()
-		{
-			return id;
-		}
-	}
+	/// <summary>Provides information about what should be done with this Field</summary>
+    public enum FieldSelectorResult
+    {
+        /// <summary>
+        /// Sentinel value indicating that no selection result was specified.
+        /// </summary>
+        INVALID, // TODO: This is kind of a kludgy workaround for the fact that enums can't be null 
+
+        /// <summary> Load this <see cref="Field" /> every time the <see cref="Document" /> is loaded, reading in the data as it is encountered.
+        /// <see cref="Document.GetField(String)" /> and <see cref="Document.GetFieldable(String)" /> should not return null.
+        /// <p/>
+        /// <see cref="Document.Add(Fieldable)" /> should be called by the Reader.
+        /// </summary>
+        LOAD,
+
+        /// <summary> Lazily load this <see cref="Field" />.  This means the <see cref="Field" /> is valid, but it may not actually contain its data until
+        /// invoked.  <see cref="Document.GetField(String)" /> SHOULD NOT BE USED.  <see cref="Document.GetFieldable(String)" /> is safe to use and should
+        /// return a valid instance of a <see cref="Fieldable" />.
+        /// <p/>
+        /// <see cref="Document.Add(Fieldable)" /> should be called by the Reader.
+        /// </summary>
+        LAZY_LOAD,
+
+        /// <summary> Do not load the <see cref="Field" />.  <see cref="Document.GetField(String)" /> and <see cref="Document.GetFieldable(String)" /> should return null.
+        /// <see cref="Document.Add(Fieldable)" /> is not called.
+        /// <p/>
+        /// <see cref="Document.Add(Fieldable)" /> should not be called by the Reader.
+        /// </summary>
+        NO_LOAD,
+
+        /// <summary> Load this field as in the <see cref="LOAD" /> case, but immediately return from <see cref="Field" /> loading for the <see cref="Document" />.  Thus, the
+        /// Document may not have its complete set of Fields.  <see cref="Document.GetField(String)" /> and <see cref="Document.GetFieldable(String)" /> should
+        /// both be valid for this <see cref="Field" />
+        /// <p/>
+        /// <see cref="Document.Add(Fieldable)" /> should be called by the Reader.
+        /// </summary>
+        LOAD_AND_BREAK,
+
+        /// <summary>Expert:  Load the size of this <see cref="Field" /> rather than its value.
+        /// Size is measured as number of bytes required to store the field == bytes for a binary or any compressed value, and 2*chars for a String value.
+        /// The size is stored as a binary value, represented as an int in a byte[], with the higher order byte first in [0]
+        /// </summary>
+        SIZE,
+
+        /// <summary>Expert: Like <see cref="SIZE" /> but immediately break from the field loading loop, i.e., stop loading further fields, after the size is loaded </summary>         
+        SIZE_AND_BREAK
+    }
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/src/core/Document/Fieldable.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Document/Fieldable.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Document/Fieldable.cs (original)
+++ incubator/lucene.net/trunk/src/core/Document/Fieldable.cs Tue Feb 28 22:43:08 2012
@@ -22,7 +22,6 @@ using FieldInvertState = Lucene.Net.Inde
 
 namespace Lucene.Net.Documents
 {
-	
 	/// <summary> Synonymous with <see cref="Field" />.
 	/// 
 	/// <p/><bold>WARNING</bold>: This interface may change within minor versions, despite Lucene's backward compatibility requirements.
@@ -65,7 +64,7 @@ namespace Lucene.Net.Documents
 		/// 
 		/// <p/>Note: this value is not stored directly with the document in the index.
 		/// Documents returned from <see cref="Lucene.Net.Index.IndexReader.Document(int)" /> and
-		/// <see cref="Lucene.Net.Search.Hits.Doc(int)" /> may thus not have the same value present as when
+		/// <see cref="Lucene.Net.Search.Searcher.Doc(int)" /> may thus not have the same value present as when
 		/// this field was indexed.
 		/// 
 		/// </summary>
@@ -81,7 +80,7 @@ namespace Lucene.Net.Documents
 		/// <summary>The value of the field as a String, or null.
 		/// <p/>
 		/// For indexing, if isStored()==true, the stringValue() will be used as the stored field value
-		/// unless isBinary()==true, in which case binaryValue() will be used.
+		/// unless isBinary()==true, in which case GetBinaryValue() will be used.
 		/// 
 		/// If isIndexed()==true and isTokenized()==false, this String value will be indexed as a single token.
 		/// If isIndexed()==true and isTokenized()==true, then tokenStreamValue() will be used to generate indexed tokens if not null,
@@ -94,11 +93,6 @@ namespace Lucene.Net.Documents
 		/// </seealso>
 		System.IO.TextReader ReaderValue();
 		
-		/// <summary>The value of the field in Binary, or null.</summary>
-		/// <seealso cref="StringValue()">
-		/// </seealso>
-		byte[] BinaryValue();
-		
 		/// <summary>The TokenStream for this field to be used when indexing, or null.</summary>
 		/// <seealso cref="StringValue()">
 		/// </seealso>
@@ -120,9 +114,6 @@ namespace Lucene.Net.Documents
 		/// </summary>
 		bool IsTokenized();
 		
-		/// <summary>True if the value of the field is stored and compressed within the index </summary>
-		bool IsCompressed();
-		
 		/// <summary>True if the term or terms used to index this field are stored as a term
 		/// vector, available from <see cref="Lucene.Net.Index.IndexReader.GetTermFreqVector(int,String)" />.
 		/// These methods do not provide access to the original content of the field,
@@ -155,18 +146,8 @@ namespace Lucene.Net.Documents
 		/// </summary>
 		void  SetOmitNorms(bool omitNorms);
 		
-		/// <deprecated> Renamed to <see cref="AbstractField.SetOmitTermFreqAndPositions" /> 
-		/// </deprecated>
-        [Obsolete("Renamed to AbstractField.SetOmitTermFreqAndPositions")]
-		void  SetOmitTf(bool omitTf);
-		
-		/// <deprecated> Renamed to <see cref="AbstractField.GetOmitTermFreqAndPositions" /> 
-		/// </deprecated>
-        [Obsolete("Renamed to AbstractField.GetOmitTermFreqAndPositions")]
-		bool GetOmitTf();
-		
 		/// <summary> Indicates whether a Field is Lazy or not.  The semantics of Lazy loading are such that if a Field is lazily loaded, retrieving
-		/// it's values via <see cref="StringValue()" /> or <see cref="BinaryValue()" /> is only valid as long as the <see cref="Lucene.Net.Index.IndexReader" /> that
+		/// its values via <see cref="StringValue()" /> or <see cref="GetBinaryValue()" /> is only valid as long as the <see cref="Lucene.Net.Index.IndexReader" /> that
 		/// retrieved the <see cref="Document" /> is still open.
 		/// 
 		/// </summary>
@@ -204,7 +185,7 @@ namespace Lucene.Net.Documents
 		/// About reuse: if you pass in the result byte[] and it is
 		/// used, likely the underlying implementation will hold
 		/// onto this byte[] and return it in future calls to
-		/// <see cref="BinaryValue()" /> or <see cref="GetBinaryValue()" />.
+		/// <see cref="GetBinaryValue()" /> or <see cref="GetBinaryValue(byte[])" />.
 		/// So if you subsequently re-use the same byte[] elsewhere
 		/// it will alter this Fieldable's value.
 		/// </summary>
@@ -215,5 +196,23 @@ namespace Lucene.Net.Documents
 		/// <returns> reference to the Field value as byte[].
 		/// </returns>
 		byte[] GetBinaryValue(byte[] result);
+
+        /// <seealso cref="SetOmitTermFreqAndPositions"/>
+        bool GetOmitTermFreqAndPositions();
+
+        /// <summary>
+        /// Expert:
+        /// <para>
+        /// If set, omit term freq, positions and payloads from
+        /// postings for this field.
+        /// </para>
+        /// <para>
+        /// <b>NOTE</b>: While this option reduces storage space
+        /// required in the index, it also means any query
+        /// requiring positional information, such as
+        /// <see cref="Lucene.Net.Search.PhraseQuery"/> or 
+        /// <see cref="Lucene.Net.Search.Spans.SpanQuery"/> 
+        /// subclasses will silently fail to find results.
+        /// </para>
+        /// </summary>
+        void SetOmitTermFreqAndPositions(bool omitTermFreqAndPositions);
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/src/core/Document/MapFieldSelector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Document/MapFieldSelector.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Document/MapFieldSelector.cs (original)
+++ incubator/lucene.net/trunk/src/core/Document/MapFieldSelector.cs Tue Feb 28 22:43:08 2012
@@ -16,23 +16,22 @@
  */
 
 using System;
+using System.Linq;
+using System.Collections.Generic;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Documents
 {
-	
-	/// <summary> A <see cref="FieldSelector" /> based on a Map of field names to <see cref="FieldSelectorResult" />s
-	/// 
-	/// </summary>
+	/// <summary>A <see cref="FieldSelector" /> based on a Map of field names to <see cref="FieldSelectorResult" />s</summary>
 	[Serializable]
 	public class MapFieldSelector : FieldSelector
 	{
-		
-		internal System.Collections.IDictionary fieldSelections;
+		internal IDictionary<string, FieldSelectorResult> fieldSelections;
 		
 		/// <summary>Create a a MapFieldSelector</summary>
 		/// <param name="fieldSelections">maps from field names (String) to <see cref="FieldSelectorResult" />s
 		/// </param>
-		public MapFieldSelector(System.Collections.IDictionary fieldSelections)
+        public MapFieldSelector(IDictionary<string, FieldSelectorResult> fieldSelections)
 		{
 			this.fieldSelections = fieldSelections;
 		}
@@ -40,21 +39,19 @@ namespace Lucene.Net.Documents
 		/// <summary>Create a a MapFieldSelector</summary>
 		/// <param name="fields">fields to LOAD.  List of Strings.  All other fields are NO_LOAD.
 		/// </param>
-		public MapFieldSelector(System.Collections.IList fields)
+		public MapFieldSelector(IList<string> fields)
 		{
-			fieldSelections = new System.Collections.Hashtable(fields.Count * 5 / 3);
-			for (int i = 0; i < fields.Count; i++)
-				fieldSelections[fields[i]] = FieldSelectorResult.LOAD;
+			fieldSelections = new HashMap<string, FieldSelectorResult>(fields.Count * 5 / 3);
+			foreach(var field in fields)
+				fieldSelections[field] = FieldSelectorResult.LOAD;
 		}
 		
 		/// <summary>Create a a MapFieldSelector</summary>
 		/// <param name="fields">fields to LOAD.  All other fields are NO_LOAD.
 		/// </param>
-		public MapFieldSelector(System.String[] fields)
+		public MapFieldSelector(params System.String[] fields)
+            : this(fields.ToList()) // TODO: this is slow
 		{
-			fieldSelections = new System.Collections.Hashtable(fields.Length * 5 / 3);
-			for (int i = 0; i < fields.Length; i++)
-				fieldSelections[fields[i]] = FieldSelectorResult.LOAD;
 		}
 		
 		/// <summary>Load field according to its associated value in fieldSelections</summary>
@@ -64,8 +61,8 @@ namespace Lucene.Net.Documents
 		/// </returns>
 		public virtual FieldSelectorResult Accept(System.String field)
 		{
-			FieldSelectorResult selection = (FieldSelectorResult) fieldSelections[field];
-			return selection != null?selection:FieldSelectorResult.NO_LOAD;
+		    FieldSelectorResult selection = fieldSelections[field];
+            return selection != FieldSelectorResult.INVALID ? selection : FieldSelectorResult.NO_LOAD; // TODO: See FieldSelectorResult
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/src/core/Document/NumberTools.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Document/NumberTools.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Document/NumberTools.cs (original)
+++ incubator/lucene.net/trunk/src/core/Document/NumberTools.cs Tue Feb 28 22:43:08 2012
@@ -16,9 +16,8 @@
  */
 
 using System;
-
+using Lucene.Net.Search;
 using NumericUtils = Lucene.Net.Util.NumericUtils;
-using NumericRangeQuery = Lucene.Net.Search.NumericRangeQuery;
 
 namespace Lucene.Net.Documents
 {
@@ -42,11 +41,11 @@ namespace Lucene.Net.Documents
 	/// provides a sortable binary representation (prefix encoded) of numeric
 	/// values.
 	/// To index and efficiently query numeric values use <see cref="NumericField" />
-	/// and <see cref="NumericRangeQuery" />.
+	/// and <see cref="NumericRangeQuery{T}" />.
 	/// This class is included for use with existing
-	/// indices and will be removed in a future release.
+	/// indices and will be removed in a future release (possibly Lucene 4.0).
 	/// </deprecated>
-    [Obsolete("For new indexes use NumericUtils instead, which provides a sortable binary representation (prefix encoded) of numeric values. To index and efficiently query numeric values use NumericField and NumericRangeQuery. This class is included for use with existing indices and will be removed in a future release.")]
+    [Obsolete("For new indexes use NumericUtils instead, which provides a sortable binary representation (prefix encoded) of numeric values. To index and efficiently query numeric values use NumericField and NumericRangeQuery. This class is included for use with existing indices and will be removed in a future release (possibly Lucene 4.0).")]
 	public class NumberTools
 	{
 		

Modified: incubator/lucene.net/trunk/src/core/Document/NumericField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Document/NumericField.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Document/NumericField.cs (original)
+++ incubator/lucene.net/trunk/src/core/Document/NumericField.cs Tue Feb 28 22:43:08 2012
@@ -16,13 +16,11 @@
  */
 
 using System;
-
+using Lucene.Net.Search;
 using NumericTokenStream = Lucene.Net.Analysis.NumericTokenStream;
 using TokenStream = Lucene.Net.Analysis.TokenStream;
 using NumericUtils = Lucene.Net.Util.NumericUtils;
 using FieldCache = Lucene.Net.Search.FieldCache;
-using NumericRangeFilter = Lucene.Net.Search.NumericRangeFilter;
-using NumericRangeQuery = Lucene.Net.Search.NumericRangeQuery;
 using SortField = Lucene.Net.Search.SortField;
 
 namespace Lucene.Net.Documents
@@ -67,11 +65,10 @@ namespace Lucene.Net.Documents
 	/// <c>long</c> value.<p/>
 	/// 
 	/// <p/>To perform range querying or filtering against a
-	/// <c>NumericField</c>, use <see cref="NumericRangeQuery" /> or <see cref="NumericRangeFilter" />
+	/// <c>NumericField</c>, use <see cref="NumericRangeQuery{T}" /> or <see cref="NumericRangeFilter{T}" />
 	///.  To sort according to a
 	/// <c>NumericField</c>, use the normal numeric sort types, eg
-	/// <see cref="SortField.INT" /> (note that <see cref="SortField.AUTO" />
-	/// will not work with these fields).  <c>NumericField</c> values
+	/// <see cref="SortField.INT" />.  <c>NumericField</c> values
 	/// can also be loaded directly from <see cref="FieldCache" />.<p/>
 	/// 
 	/// <p/>By default, a <c>NumericField</c>'s value is not stored but
@@ -106,8 +103,8 @@ namespace Lucene.Net.Documents
 	/// use the expert constructor <see cref="NumericField(String,int,Field.Store,bool)" />
 	/// if you'd
 	/// like to change the value.  Note that you must also
-	/// specify a congruent value when creating <see cref="NumericRangeQuery" />
-	/// or <see cref="NumericRangeFilter" />.
+	/// specify a congruent value when creating <see cref="NumericRangeQuery{T}" />
+	/// or <see cref="NumericRangeFilter{T}" />.
 	/// For low cardinality fields larger precision steps are good.
 	/// If the cardinality is &lt; 100, it is fair
 	/// to use <see cref="int.MaxValue" />, which produces one
@@ -116,7 +113,7 @@ namespace Lucene.Net.Documents
 	/// <p/>For more information on the internals of numeric trie
 	/// indexing, including the <a
 	/// href="../search/NumericRangeQuery.html#precisionStepDesc"><c>precisionStep</c></a>
-	/// configuration, see <see cref="NumericRangeQuery" />. The format of
+	/// configuration, see <see cref="NumericRangeQuery{T}" />. The format of
 	/// indexed values is described in <see cref="NumericUtils" />.
 	/// 
 	/// <p/>If you only need to sort by numeric value, and never
@@ -218,12 +215,6 @@ namespace Lucene.Net.Documents
 		}
 		
 		/// <summary>Returns always <c>null</c> for numeric fields </summary>
-		public override byte[] BinaryValue()
-		{
-			return null;
-		}
-		
-		/// <summary>Returns always <c>null</c> for numeric fields </summary>
 		public override byte[] GetBinaryValue(byte[] result)
 		{
 			return null;
@@ -256,7 +247,7 @@ namespace Lucene.Net.Documents
 		public NumericField SetLongValue(long value_Renamed)
 		{
 			tokenStream.SetLongValue(value_Renamed);
-			fieldsData = (long) value_Renamed;
+			fieldsData = value_Renamed;
 			return this;
 		}
 		
@@ -269,7 +260,7 @@ namespace Lucene.Net.Documents
 		public NumericField SetIntValue(int value_Renamed)
 		{
 			tokenStream.SetIntValue(value_Renamed);
-			fieldsData = (System.Int32) value_Renamed;
+			fieldsData = value_Renamed;
 			return this;
 		}
 		
@@ -282,7 +273,7 @@ namespace Lucene.Net.Documents
 		public NumericField SetDoubleValue(double value_Renamed)
 		{
 			tokenStream.SetDoubleValue(value_Renamed);
-			fieldsData = (double) value_Renamed;
+			fieldsData = value_Renamed;
 			return this;
 		}
 		
@@ -295,7 +286,7 @@ namespace Lucene.Net.Documents
 		public NumericField SetFloatValue(float value_Renamed)
 		{
 			tokenStream.SetFloatValue(value_Renamed);
-			fieldsData = (float) value_Renamed;
+			fieldsData = value_Renamed;
 			return this;
 		}
 	}

Modified: incubator/lucene.net/trunk/src/core/Document/SetBasedFieldSelector.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Document/SetBasedFieldSelector.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Document/SetBasedFieldSelector.cs (original)
+++ incubator/lucene.net/trunk/src/core/Document/SetBasedFieldSelector.cs Tue Feb 28 22:43:08 2012
@@ -16,6 +16,7 @@
  */
 
 using System;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Documents
 {
@@ -26,11 +27,8 @@ namespace Lucene.Net.Documents
 	[Serializable]
 	public class SetBasedFieldSelector : FieldSelector
 	{
-		
-		private System.Collections.Hashtable fieldsToLoad;
-		private System.Collections.Hashtable lazyFieldsToLoad;
-		
-		
+		private ISet<string> fieldsToLoad;
+		private ISet<string> lazyFieldsToLoad;
 		
 		/// <summary> Pass in the Set of <see cref="Field" /> names to load and the Set of <see cref="Field" /> names to load lazily.  If both are null, the
 		/// Document will not have any <see cref="Field" /> on it.  
@@ -39,7 +37,7 @@ namespace Lucene.Net.Documents
 		/// </param>
 		/// <param name="lazyFieldsToLoad">A Set of <see cref="String" /> field names to load lazily.  May be empty, but not null  
 		/// </param>
-		public SetBasedFieldSelector(System.Collections.Hashtable fieldsToLoad, System.Collections.Hashtable lazyFieldsToLoad)
+		public SetBasedFieldSelector(ISet<string> fieldsToLoad, ISet<string> lazyFieldsToLoad)
 		{
 			this.fieldsToLoad = fieldsToLoad;
 			this.lazyFieldsToLoad = lazyFieldsToLoad;

Modified: incubator/lucene.net/trunk/src/core/Index/AbstractAllTermDocs.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/AbstractAllTermDocs.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/AbstractAllTermDocs.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/AbstractAllTermDocs.cs Tue Feb 28 22:43:08 2012
@@ -105,8 +105,16 @@ namespace Lucene.Net.Index
 
         public void Close()
         {
+            Dispose();
         }
 
+        public void Dispose()
+        {
+            Dispose(true);
+        }
+
+        protected abstract void Dispose(bool disposing);
+
         public abstract bool IsDeleted(int doc);
     }
 }

Modified: incubator/lucene.net/trunk/src/core/Index/AllTermDocs.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/AllTermDocs.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/AllTermDocs.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/AllTermDocs.cs Tue Feb 28 22:43:08 2012
@@ -34,6 +34,11 @@ namespace Lucene.Net.Index
 			}
 		}
 
+        protected override void Dispose(bool disposing)
+        {
+            // Do nothing.
+        }
+
         public override bool IsDeleted(int doc)
         {
             return deletedDocs != null && deletedDocs.Get(doc);

Modified: incubator/lucene.net/trunk/src/core/Index/BufferedDeletes.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/BufferedDeletes.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/BufferedDeletes.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/BufferedDeletes.cs Tue Feb 28 22:43:08 2012
@@ -16,6 +16,9 @@
  */
 
 using System;
+using System.Collections.Generic;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
@@ -31,9 +34,9 @@ namespace Lucene.Net.Index
 	class BufferedDeletes
 	{
 		internal int numTerms;
-        internal System.Collections.IDictionary terms = null;
-		internal System.Collections.Hashtable queries = new System.Collections.Hashtable();
-		internal System.Collections.ArrayList docIDs = new System.Collections.ArrayList();
+        internal IDictionary<Term,Num> terms = null;
+		internal IDictionary<Query, int> queries = new HashMap<Query, int>();
+		internal List<int> docIDs = new List<int>();
 		internal long bytesUsed;
         internal  bool doTermSort;
 
@@ -42,11 +45,12 @@ namespace Lucene.Net.Index
             this.doTermSort = doTermSort;
             if (doTermSort)
             {
-                terms = new System.Collections.Generic.SortedDictionary<object, object>();
+                //TODO: Used in place of TreeMap
+                terms = new SortedDictionary<Term, Num>();
             }
             else
             {
-                terms = new System.Collections.Hashtable();
+                terms = new HashMap<Term, Num>();
             }
         }
                 
@@ -92,16 +96,14 @@ namespace Lucene.Net.Index
 		{
 			numTerms += in_Renamed.numTerms;
 			bytesUsed += in_Renamed.bytesUsed;
-
-			System.Collections.ArrayList keys = new System.Collections.ArrayList(in_Renamed.terms.Keys);
-			System.Collections.ArrayList values = new System.Collections.ArrayList(in_Renamed.terms.Values);
-			for (int i=0; i < keys.Count; i++)
-				terms[keys[i]] = values[i];
-
-			keys = new System.Collections.ArrayList(in_Renamed.queries.Keys);
-			values = new System.Collections.ArrayList(in_Renamed.queries.Values);
-			for (int i=0; i < keys.Count; i++)
-				queries[keys[i]] = values[i];
+		    foreach (KeyValuePair<Term, Num> term in in_Renamed.terms)
+            {
+                terms[term.Key] = term.Value;
+		    }
+            foreach (KeyValuePair<Query, int> term in in_Renamed.queries)
+            {
+                queries[term.Key] = term.Value;
+            }
 
 			docIDs.AddRange(in_Renamed.docIDs);
 			in_Renamed.Clear();
@@ -132,25 +134,22 @@ namespace Lucene.Net.Index
 		{
 			lock (this)
 			{
-				
-				System.Collections.IDictionary newDeleteTerms;
+				IDictionary<Term, Num> newDeleteTerms;
 				
 				// Remap delete-by-term
 				if (terms.Count > 0)
 				{
                     if (doTermSort)
                     {
-                        newDeleteTerms = new System.Collections.Generic.SortedDictionary<object, object>();
+                        newDeleteTerms = new SortedDictionary<Term, Num>();
                     }
                     else
                     {
-                        newDeleteTerms = new System.Collections.Hashtable();
+                        newDeleteTerms = new HashMap<Term, Num>();
                     }
-					System.Collections.IEnumerator iter = new System.Collections.Hashtable(terms).GetEnumerator();
-					while (iter.MoveNext())
+					foreach(var entry in terms)
 					{
-						System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iter.Current;
-						Num num = (Num) entry.Value;
+						Num num = entry.Value;
 						newDeleteTerms[entry.Key] = new Num(mapper.Remap(num.GetNum()));
 					}
 				}
@@ -158,33 +157,29 @@ namespace Lucene.Net.Index
 					newDeleteTerms = null;
 				
 				// Remap delete-by-docID
-				System.Collections.ArrayList newDeleteDocIDs;
+				List<int> newDeleteDocIDs;
 				
 				if (docIDs.Count > 0)
 				{
-					newDeleteDocIDs = new System.Collections.ArrayList(docIDs.Count);
-					System.Collections.IEnumerator iter = docIDs.GetEnumerator();
-					while (iter.MoveNext())
+					newDeleteDocIDs = new List<int>(docIDs.Count);
+					foreach(int num in docIDs)
 					{
-						System.Int32 num = (System.Int32) iter.Current;
-						newDeleteDocIDs.Add((System.Int32) mapper.Remap(num));
+						newDeleteDocIDs.Add(mapper.Remap(num));
 					}
 				}
 				else
 					newDeleteDocIDs = null;
 				
 				// Remap delete-by-query
-				System.Collections.Hashtable newDeleteQueries;
+				HashMap<Query, int> newDeleteQueries;
 				
 				if (queries.Count > 0)
 				{
-					newDeleteQueries = new System.Collections.Hashtable(queries.Count);
-					System.Collections.IEnumerator iter = new System.Collections.Hashtable(queries).GetEnumerator();
-					while (iter.MoveNext())
+                    newDeleteQueries = new HashMap<Query, int>(queries.Count);
+					foreach(var entry in queries)
 					{
-						System.Collections.DictionaryEntry entry = (System.Collections.DictionaryEntry) iter.Current;
-						System.Int32 num = (System.Int32) entry.Value;
-						newDeleteQueries[entry.Key] = (System.Int32) mapper.Remap(num);
+						int num = entry.Value;
+						newDeleteQueries[entry.Key] = mapper.Remap(num);
 					}
 				}
 				else

Modified: incubator/lucene.net/trunk/src/core/Index/ByteBlockPool.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/ByteBlockPool.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/ByteBlockPool.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/ByteBlockPool.cs Tue Feb 28 22:43:08 2012
@@ -34,6 +34,7 @@
 
 using System;
 using System.Collections.Generic;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
@@ -48,7 +49,7 @@ namespace Lucene.Net.Index
 		public /*internal*/ abstract class Allocator
 		{
 			public /*internal*/ abstract void  RecycleByteBlocks(byte[][] blocks, int start, int end);
-            public /*internal*/ abstract void RecycleByteBlocks(System.Collections.ArrayList blocks);
+            public /*internal*/ abstract void RecycleByteBlocks(IList<byte[]> blocks);
 			public /*internal*/ abstract byte[] GetByteBlock(bool trackAllocations);
 		}
 		
@@ -152,9 +153,9 @@ namespace Lucene.Net.Index
 			buffer[newUpto + 2] = slice[upto - 1];
 			
 			// Write forwarding address at end of last slice:
-			slice[upto - 3] = (byte) (SupportClass.Number.URShift(offset, 24));
-			slice[upto - 2] = (byte) (SupportClass.Number.URShift(offset, 16));
-			slice[upto - 1] = (byte) (SupportClass.Number.URShift(offset, 8));
+			slice[upto - 3] = (byte) (Number.URShift(offset, 24));
+			slice[upto - 2] = (byte) (Number.URShift(offset, 16));
+			slice[upto - 1] = (byte) (Number.URShift(offset, 8));
 			slice[upto] = (byte) offset;
 			
 			// Write new level:

Modified: incubator/lucene.net/trunk/src/core/Index/ByteSliceReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/ByteSliceReader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/ByteSliceReader.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/ByteSliceReader.cs Tue Feb 28 22:43:08 2012
@@ -28,7 +28,7 @@ namespace Lucene.Net.Index
 	* each slice until we hit the end of that slice at which
 	* point we read the forwarding address of the next slice
 	* and then jump to it.*/
-	public sealed class ByteSliceReader:IndexInput
+	public sealed class ByteSliceReader : IndexInput
 	{
 		internal ByteBlockPool pool;
 		internal int bufferUpto;
@@ -169,10 +169,11 @@ namespace Lucene.Net.Index
 		{
 			throw new System.SystemException("not implemented");
 		}
-		public override void  Close()
-		{
-			throw new System.SystemException("not implemented");
-		}
+
+        protected override void Dispose(bool disposing)
+        {
+            // Do nothing...
+        }
 		
 		override public System.Object Clone()
 		{

Modified: incubator/lucene.net/trunk/src/core/Index/ByteSliceWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/ByteSliceWriter.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/ByteSliceWriter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/ByteSliceWriter.cs Tue Feb 28 22:43:08 2012
@@ -16,6 +16,7 @@
  */
 
 using System;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
@@ -81,18 +82,24 @@ namespace Lucene.Net.Index
 				System.Diagnostics.Debug.Assert(upto != slice.Length);
 			}
 		}
-		
-		public int GetAddress()
-		{
-			return upto + (offset0 & DocumentsWriter.BYTE_BLOCK_NOT_MASK);
-		}
-		
-		public void  WriteVInt(int i)
+
+	    public int Address
+	    {
+	        get { return upto + (offset0 & DocumentsWriter.BYTE_BLOCK_NOT_MASK); }
+	    }
+
+        [Obsolete("Use Address property instead.")]
+        public int GetAddress()
+        {
+            return Address;
+        }
+
+	    public void  WriteVInt(int i)
 		{
 			while ((i & ~ 0x7F) != 0)
 			{
 				WriteByte((byte) ((i & 0x7f) | 0x80));
-				i = SupportClass.Number.URShift(i, 7);
+				i = Number.URShift(i, 7);
 			}
 			WriteByte((byte) i);
 		}

Modified: incubator/lucene.net/trunk/src/core/Index/CheckIndex.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/CheckIndex.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/CheckIndex.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/CheckIndex.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using System.Collections.Generic;
+using Lucene.Net.Support;
 using AbstractField = Lucene.Net.Documents.AbstractField;
 using Document = Lucene.Net.Documents.Document;
 using Directory = Lucene.Net.Store.Directory;
@@ -40,14 +41,6 @@ namespace Lucene.Net.Index
 	/// </summary>
 	public class CheckIndex
 	{
-		
-		/// <summary>Default PrintStream for all CheckIndex instances.</summary>
-		/// <deprecated> Use <see cref="SetInfoStream" /> per instance,
-		/// instead. 
-		/// </deprecated>
-        [Obsolete("Use SetInfoStream per instance,instead.")]
-		public static System.IO.StreamWriter out_Renamed = null;
-		
 		private System.IO.StreamWriter infoStream;
 		private Directory dir;
 		
@@ -85,13 +78,13 @@ namespace Lucene.Net.Index
 			/// <summary>Empty unless you passed specific segments list to check as optional 3rd argument.</summary>
 			/// <seealso cref="CheckIndex.CheckIndex_Renamed_Method(System.Collections.IList)">
 			/// </seealso>
-			public System.Collections.IList segmentsChecked = new System.Collections.ArrayList();
+			public List<string> segmentsChecked = new List<string>();
 			
 			/// <summary>True if the index was created with a newer version of Lucene than the CheckIndex tool. </summary>
 			public bool toolOutOfDate;
 			
 			/// <summary>List of <see cref="SegmentInfoStatus" /> instances, detailing status of each segment. </summary>
-			public System.Collections.IList segmentInfos = new System.Collections.ArrayList();
+			public IList<SegmentInfoStatus> segmentInfos = new List<SegmentInfoStatus>();
 			
 			/// <summary>Directory index is in. </summary>
 			public Directory dir;
@@ -115,7 +108,7 @@ namespace Lucene.Net.Index
 			public bool partial;
 			
 			/// <summary>Holds the userData of the last commit in the index </summary>
-            public System.Collections.Generic.IDictionary<string, string> userData;
+            public IDictionary<string, string> userData;
 			
 			/// <summary>Holds the status of each segment in the index.
 			/// See <see cref="SegmentInfos" />.
@@ -186,7 +179,7 @@ namespace Lucene.Net.Index
 				/// debugging details that IndexWriter records into
 				/// each segment it creates 
 				/// </summary>
-                public System.Collections.Generic.IDictionary<string, string> diagnostics;
+                public IDictionary<string, string> diagnostics;
 				
 				/// <summary>Status for testing of field norms (null if field norms could not be tested). </summary>
 				public FieldNormStatus fieldNormStatus;
@@ -260,7 +253,7 @@ namespace Lucene.Net.Index
 		public CheckIndex(Directory dir)
 		{
 			this.dir = dir;
-			infoStream = out_Renamed;
+			infoStream = null;
 		}
 		
 		/// <summary>Set infoStream where messages should go.  If null, no
@@ -298,29 +291,6 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		/// <summary>Returns true if index is clean, else false. </summary>
-        /// <deprecated> Please instantiate a CheckIndex and then use <see cref="CheckIndex_Renamed_Method()" /> instead 
-		/// </deprecated>
-        [Obsolete("Please instantiate a CheckIndex and then use CheckIndex() instead")]
-		public static bool Check(Directory dir, bool doFix)
-		{
-			return Check(dir, doFix, null);
-		}
-		
-		/// <summary>Returns true if index is clean, else false.</summary>
-        /// <deprecated> Please instantiate a CheckIndex and then use <see cref="CheckIndex_Renamed_Method(System.Collections.IList)" /> instead 
-		/// </deprecated>
-        [Obsolete("Please instantiate a CheckIndex and then use CheckIndex(List) instead")]
-		public static bool Check(Directory dir, bool doFix, System.Collections.IList onlySegments)
-		{
-			CheckIndex checker = new CheckIndex(dir);
-			Status status = checker.CheckIndex_Renamed_Method(onlySegments);
-			if (doFix && !status.clean)
-				checker.FixIndex(status);
-			
-			return status.clean;
-		}
-		
 		/// <summary>Returns a <see cref="Status" /> instance detailing
 		/// the state of the index.
 		/// 
@@ -350,7 +320,7 @@ namespace Lucene.Net.Index
 		/// you only call this when the index is not opened by any
 		/// writer. 
 		/// </param>
-		public virtual Status CheckIndex_Renamed_Method(System.Collections.IList onlySegments)
+		public virtual Status CheckIndex_Renamed_Method(List<string> onlySegments)
 		{
             System.Globalization.NumberFormatInfo nf = System.Globalization.CultureInfo.CurrentCulture.NumberFormat;
 			SegmentInfos sis = new SegmentInfos();
@@ -444,7 +414,7 @@ namespace Lucene.Net.Index
 			System.String userDataString;
 			if (sis.GetUserData().Count > 0)
 			{
-				userDataString = " userData=" + SupportClass.CollectionsHelper.CollectionToString(sis.GetUserData());
+				userDataString = " userData=" + CollectionsHelper.CollectionToString(sis.GetUserData());
 			}
 			else
 			{
@@ -458,19 +428,14 @@ namespace Lucene.Net.Index
 				result.partial = true;
 				if (infoStream != null)
 					infoStream.Write("\nChecking only these segments:");
-				System.Collections.IEnumerator it = onlySegments.GetEnumerator();
-				while (it.MoveNext())
+                foreach(string s in onlySegments)
 				{
 					if (infoStream != null)
 					{
-						infoStream.Write(" " + it.Current);
+						infoStream.Write(" " + s);
 					}
 				}
-                System.Collections.IEnumerator e = onlySegments.GetEnumerator();
-                while (e.MoveNext() == true)
-                {
-                    result.segmentsChecked.Add(e.Current);
-                }
+                result.segmentsChecked.AddRange(onlySegments);
                 Msg(":");
 			}
 			
@@ -510,11 +475,11 @@ namespace Lucene.Net.Index
 					segInfoStat.numFiles = info.Files().Count;
 					Msg(System.String.Format(nf, "    size (MB)={0:f}", new System.Object[] { (info.SizeInBytes() / (1024.0 * 1024.0)) }));
 					segInfoStat.sizeMB = info.SizeInBytes() / (1024.0 * 1024.0);
-                    System.Collections.Generic.IDictionary<string, string> diagnostics = info.GetDiagnostics();
+                    IDictionary<string, string> diagnostics = info.GetDiagnostics();
 					segInfoStat.diagnostics = diagnostics;
 					if (diagnostics.Count > 0)
 					{
-						Msg("    diagnostics = " + SupportClass.CollectionsHelper.CollectionToString(diagnostics));
+						Msg("    diagnostics = " + CollectionsHelper.CollectionToString(diagnostics));
 					}
 					
 					int docStoreOffset = info.GetDocStoreOffset();
@@ -541,7 +506,7 @@ namespace Lucene.Net.Index
 					}
 					if (infoStream != null)
 						infoStream.Write("    test: open reader.........");
-					reader = SegmentReader.Get(info);
+					reader = SegmentReader.Get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 					
 					segInfoStat.openReaderPassed = true;
 					
@@ -580,7 +545,7 @@ namespace Lucene.Net.Index
 					{
 						infoStream.Write("    test: fields..............");
 					}
-                    System.Collections.Generic.ICollection<string> fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
+                    ICollection<string> fieldNames = reader.GetFieldNames(IndexReader.FieldOption.ALL);
 					Msg("OK [" + fieldNames.Count + " fields]");
 					segInfoStat.numFields = fieldNames.Count;
 					
@@ -637,7 +602,7 @@ namespace Lucene.Net.Index
 				}
 				
 				// Keeper
-				result.newSegments.Add(info.Clone());
+				result.newSegments.Add((SegmentInfo)info.Clone());
 			}
 			
 			if (0 == result.numBadSegments)
@@ -652,7 +617,7 @@ namespace Lucene.Net.Index
 		}
 		
 		/// <summary> Test field norms.</summary>
-        private Status.FieldNormStatus TestFieldNorms(System.Collections.Generic.ICollection<string> fieldNames, SegmentReader reader)
+        private Status.FieldNormStatus TestFieldNorms(ICollection<string> fieldNames, SegmentReader reader)
 		{
 			Status.FieldNormStatus status = new Status.FieldNormStatus();
 			
@@ -663,11 +628,10 @@ namespace Lucene.Net.Index
 				{
 					infoStream.Write("    test: field norms.........");
 				}
-				System.Collections.IEnumerator it = fieldNames.GetEnumerator();
+
 				byte[] b = new byte[reader.MaxDoc()];
-				while (it.MoveNext())
+				foreach(string fieldName in fieldNames)
 				{
-					System.String fieldName = (System.String) it.Current;
                     if (reader.HasNorms(fieldName))
                     {
                         reader.Norms(fieldName, b, 0);
@@ -752,6 +716,7 @@ namespace Lucene.Net.Index
 							{
 								throw new System.SystemException("term " + term + ": doc " + doc + ": pos " + pos + " < lastPos " + lastPos);
 							}
+						    lastPos = pos;
 						}
 					}
 					
@@ -943,7 +908,7 @@ namespace Lucene.Net.Index
 		{
 			
 			bool doFix = false;
-			System.Collections.IList onlySegments = new System.Collections.ArrayList();
+			List<string> onlySegments = new List<string>();
 			System.String indexPath = null;
 			int i = 0;
 			while (i < args.Length)
@@ -997,7 +962,7 @@ namespace Lucene.Net.Index
 			Directory dir = null;
 			try
 			{
-				dir = FSDirectory.Open(new System.IO.FileInfo(indexPath));
+				dir = FSDirectory.Open(new System.IO.DirectoryInfo(indexPath));
 			}
 			catch (System.Exception t)
 			{

Modified: incubator/lucene.net/trunk/src/core/Index/CompoundFileReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/CompoundFileReader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/CompoundFileReader.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/CompoundFileReader.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using System.Linq;
+using Lucene.Net.Support;
 using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput;
 using Directory = Lucene.Net.Store.Directory;
 using IndexInput = Lucene.Net.Store.IndexInput;
@@ -30,12 +31,8 @@ namespace Lucene.Net.Index
 	/// <summary> Class for accessing a compound stream.
 	/// This class implements a directory, but is limited to only read operations.
 	/// Directory methods that would normally modify data throw an exception.
-	/// 
-	/// 
 	/// </summary>
-	/// <version>  $Id: CompoundFileReader.java 673371 2008-07-02 11:57:27Z mikemccand $
-	/// </version>
-	public class CompoundFileReader:Directory
+	public class CompoundFileReader : Directory
 	{
 		
 		private int readBufferSize;
@@ -45,14 +42,15 @@ namespace Lucene.Net.Index
 			internal long offset;
 			internal long length;
 		}
-		
+
+	    private bool isDisposed;
 		
 		// Base info
 		private Directory directory;
 		private System.String fileName;
 		
 		private IndexInput stream;
-		private System.Collections.Hashtable entries = new System.Collections.Hashtable();
+		private HashMap<string, FileEntry> entries = new HashMap<string, FileEntry>();
 		
 		
 		public CompoundFileReader(Directory dir, System.String name):this(dir, name, BufferedIndexInput.BUFFER_SIZE)
@@ -112,36 +110,50 @@ namespace Lucene.Net.Index
 				}
 			}
 		}
-		
-		public virtual Directory GetDirectory()
-		{
-			return directory;
-		}
-		
-		public virtual System.String GetName()
-		{
-			return fileName;
-		}
-		
-		public override void  Close()
-		{
-			lock (this)
-			{
-				if (stream == null)
-					throw new System.IO.IOException("Already closed");
-				
-				entries.Clear();
-				stream.Close();
-				stream = null;
-			}
-		}
 
-        /// <summary>
-        /// .NET
-        /// </summary>
-        public override void Dispose()
+	    public virtual Directory Directory
+	    {
+	        get { return directory; }
+	    }
+
+        [Obsolete("Use Directory property instead.")]
+        public virtual Directory GetDirectory()
         {
-            Close();
+            return Directory;
+        }
+
+	    public virtual string Name
+	    {
+	        get { return fileName; }
+	    }
+
+        [Obsolete("Use Name property instead.")]
+        public virtual System.String GetName()
+        {
+            return fileName;
+        }
+
+	    protected override void Dispose(bool disposing)
+        {
+            lock (this)
+            {
+                if (isDisposed) return;
+                if (disposing)
+                {
+                    if (entries != null)
+                    {
+                        entries.Clear();
+                    }
+                    if (stream != null)
+                    {
+                        stream.Close();
+                    }
+                }
+
+                entries = null;
+                stream = null;
+                isDisposed = true;
+            }
         }
 		
 		public override IndexInput OpenInput(System.String id)
@@ -160,7 +172,7 @@ namespace Lucene.Net.Index
 				if (stream == null)
 					throw new System.IO.IOException("Stream closed");
 				
-				FileEntry entry = (FileEntry) entries[id];
+				FileEntry entry = entries[id];
 				if (entry == null)
 					throw new System.IO.IOException("No sub-file with id " + id + " found");
 				
@@ -169,12 +181,9 @@ namespace Lucene.Net.Index
 		}
 		
 		/// <summary>Returns an array of strings, one for each file in the directory. </summary>
-        [Obsolete("Lucene.Net-2.9.1. This method overrides obsolete member Lucene.Net.Store.Directory.List()")]
-		public override System.String[] List()
+		public override System.String[] ListAll()
 		{
-			System.String[] res = new System.String[entries.Count];
-			entries.Keys.CopyTo(res, 0);
-			return res;
+		    return entries.Keys.ToArray();
 		}
 		
 		/// <summary>Returns true iff a file with the given name exists. </summary>
@@ -204,8 +213,7 @@ namespace Lucene.Net.Index
 		
 		/// <summary>Not implemented</summary>
 		/// <throws>  UnsupportedOperationException  </throws>
-        [Obsolete("Lucene.Net-2.9.1. This method overrides obsolete member Lucene.Net.Store.Directory.RenameFile(string, string)")]
-		public override void  RenameFile(System.String from, System.String to)
+		public void RenameFile(System.String from, System.String to)
 		{
 			throw new System.NotSupportedException();
 		}
@@ -214,7 +222,7 @@ namespace Lucene.Net.Index
 		/// <throws>  IOException if the file does not exist  </throws>
 		public override long FileLength(System.String name)
 		{
-			FileEntry e = (FileEntry) entries[name];
+			FileEntry e = entries[name];
 			if (e == null)
 				throw new System.IO.IOException("File " + name + " does not exist");
 			return e.length;
@@ -239,12 +247,13 @@ namespace Lucene.Net.Index
 		/// this helps with testing since JUnit test cases in a different class
 		/// can then access package fields of this class.
 		/// </summary>
-		public /*internal*/ sealed class CSIndexInput:BufferedIndexInput, System.ICloneable
+		public /*internal*/ sealed class CSIndexInput : BufferedIndexInput
 		{
-			
 			internal IndexInput base_Renamed;
 			internal long fileOffset;
 			internal long length;
+
+		    private bool isDisposed;
 			
 			internal CSIndexInput(IndexInput base_Renamed, long fileOffset, long length):this(base_Renamed, fileOffset, length, BufferedIndexInput.BUFFER_SIZE)
 			{
@@ -292,12 +301,21 @@ namespace Lucene.Net.Index
 			public override void  SeekInternal(long pos)
 			{
 			}
-			
-			/// <summary>Closes the stream to further operations. </summary>
-			public override void  Close()
-			{
-				base_Renamed.Close();
-			}
+
+            protected override void Dispose(bool disposing)
+            {
+                if (isDisposed) return;
+
+                if (disposing)
+                {
+                    if (base_Renamed != null)
+                    {
+                        base_Renamed.Close();
+                    }
+                }
+                
+                isDisposed = true;
+            }
 			
 			public override long Length()
 			{

Modified: incubator/lucene.net/trunk/src/core/Index/CompoundFileWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/CompoundFileWriter.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/CompoundFileWriter.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/CompoundFileWriter.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using System.Collections.Generic;
 using Directory = Lucene.Net.Store.Directory;
 using IndexInput = Lucene.Net.Store.IndexInput;
 using IndexOutput = Lucene.Net.Store.IndexOutput;
@@ -43,12 +43,8 @@ namespace Lucene.Net.Index
 	/// file. The {directory} that follows has that many entries. Each directory entry
 	/// contains a long pointer to the start of this file's data section, and a String
 	/// with that file's name.
-	/// 
-	/// 
 	/// </summary>
-	/// <version>  $Id: CompoundFileWriter.java 690539 2008-08-30 17:33:06Z mikemccand $
-	/// </version>
-	public sealed class CompoundFileWriter
+	public sealed class CompoundFileWriter : IDisposable
 	{
 		
 		private sealed class FileEntry
@@ -66,8 +62,8 @@ namespace Lucene.Net.Index
 		
 		private Directory directory;
 		private System.String fileName;
-        private System.Collections.Hashtable ids;
-		private System.Collections.ArrayList entries;
+        private HashSet<string> ids;
+		private LinkedList<FileEntry> entries;
 		private bool merged = false;
 		private SegmentMerger.CheckAbort checkAbort;
 		
@@ -88,23 +84,37 @@ namespace Lucene.Net.Index
 			this.checkAbort = checkAbort;
 			directory = dir;
 			fileName = name;
-            ids = new System.Collections.Hashtable();
-			entries = new System.Collections.ArrayList();
-		}
-		
-		/// <summary>Returns the directory of the compound file. </summary>
-		public Directory GetDirectory()
-		{
-			return directory;
+            ids = new HashSet<string>();
+			entries = new LinkedList<FileEntry>();
 		}
-		
-		/// <summary>Returns the name of the compound file. </summary>
-		public System.String GetName()
-		{
-			return fileName;
-		}
-		
-		/// <summary>Add a source stream. <c>file</c> is the string by which the 
+
+	    /// <summary>Returns the directory of the compound file. </summary>
+	    public Directory Directory
+	    {
+	        get { return directory; }
+	    }
+
+        /// <summary>Returns the directory of the compound file. </summary>
+        [Obsolete("Use Directory property instead")]
+        public Directory GetDirectory()
+        {
+            return Directory;
+        }
+
+	    /// <summary>Returns the name of the compound file. </summary>
+	    public string Name
+	    {
+	        get { return fileName; }
+	    }
+
+        /// <summary>Returns the name of the compound file. </summary>
+        [Obsolete("Use Name property instead")]
+        public System.String GetName()
+        {
+            return Name;
+        }
+
+	    /// <summary>Add a source stream. <c>file</c> is the string by which the 
 		/// sub-stream will be known in the compound stream.
 		/// 
 		/// </summary>
@@ -123,7 +133,7 @@ namespace Lucene.Net.Index
 			
             try
             {
-                ids.Add(file, file);
+                ids.Add(file);
             }
             catch (Exception)
             {
@@ -132,101 +142,105 @@ namespace Lucene.Net.Index
 			
 			FileEntry entry = new FileEntry();
 			entry.file = file;
-			entries.Add(entry);
+			entries.AddLast(entry);
 		}
 		
-		/// <summary>Merge files with the extensions added up to now.
-		/// All files with these extensions are combined sequentially into the
-		/// compound stream. After successful merge, the source files
-		/// are deleted.
-		/// </summary>
-		/// <throws>  IllegalStateException if close() had been called before or </throws>
-		/// <summary>   if no file has been added to this object
-		/// </summary>
+        [Obsolete("Use Dispose() instead")]
 		public void  Close()
 		{
-			if (merged)
-				throw new System.SystemException("Merge already performed");
-			
-			if ((entries.Count == 0))
-				throw new System.SystemException("No entries to merge have been defined");
-			
-			merged = true;
-			
-			// open the compound stream
-			IndexOutput os = null;
-			try
-			{
-				os = directory.CreateOutput(fileName);
-				
-				// Write the number of entries
-				os.WriteVInt(entries.Count);
-				
-				// Write the directory with all offsets at 0.
-				// Remember the positions of directory entries so that we can
-				// adjust the offsets later
-				System.Collections.IEnumerator it = entries.GetEnumerator();
-				long totalSize = 0;
-				while (it.MoveNext())
-				{
-					FileEntry fe = (FileEntry) it.Current;
-					fe.directoryOffset = os.GetFilePointer();
-					os.WriteLong(0); // for now
-					os.WriteString(fe.file);
-					totalSize += directory.FileLength(fe.file);
-				}
-				
-				// Pre-allocate size of file as optimization --
-				// this can potentially help IO performance as
-				// we write the file and also later during
-				// searching.  It also uncovers a disk-full
-				// situation earlier and hopefully without
-				// actually filling disk to 100%:
-				long finalLength = totalSize + os.GetFilePointer();
-				os.SetLength(finalLength);
-				
-				// Open the files and copy their data into the stream.
-				// Remember the locations of each file's data section.
-				byte[] buffer = new byte[16384];
-				it = entries.GetEnumerator();
-				while (it.MoveNext())
-				{
-					FileEntry fe = (FileEntry) it.Current;
-					fe.dataOffset = os.GetFilePointer();
-					CopyFile(fe, os, buffer);
-				}
-				
-				// Write the data offsets into the directory of the compound stream
-				it = entries.GetEnumerator();
-				while (it.MoveNext())
-				{
-					FileEntry fe = (FileEntry) it.Current;
-					os.Seek(fe.directoryOffset);
-					os.WriteLong(fe.dataOffset);
-				}
-				
-				System.Diagnostics.Debug.Assert(finalLength == os.Length());
-				
-				// Close the output stream. Set the os to null before trying to
-				// close so that if an exception occurs during the close, the
-				// finally clause below will not attempt to close the stream
-				// the second time.
-				IndexOutput tmp = os;
-				os = null;
-				tmp.Close();
-			}
-			finally
-			{
-				if (os != null)
-					try
-					{
-						os.Close();
-					}
-					catch (System.IO.IOException e)
-					{
-					}
-			}
+		    Dispose();
 		}
+
+        /// <summary>Merge files with the extensions added up to now.
+        /// All files with these extensions are combined sequentially into the
+        /// compound stream. After successful merge, the source files
+        /// are deleted.
+        /// </summary>
+        /// <throws>  IllegalStateException if close() had been called before or </throws>
+        /// <summary>   if no file has been added to this object
+        /// </summary>
+        public void Dispose()
+        {
+            // Extract into protected method if class ever becomes unsealed
+
+            // TODO: Dispose shouldn't throw exceptions!
+            if (merged)
+                throw new System.SystemException("Merge already performed");
+
+            if ((entries.Count == 0))
+                throw new System.SystemException("No entries to merge have been defined");
+
+            merged = true;
+
+            // open the compound stream
+            IndexOutput os = null;
+            try
+            {
+                os = directory.CreateOutput(fileName);
+
+                // Write the number of entries
+                os.WriteVInt(entries.Count);
+
+                // Write the directory with all offsets at 0.
+                // Remember the positions of directory entries so that we can
+                // adjust the offsets later
+                long totalSize = 0;
+                foreach (FileEntry fe in entries)
+                {
+                    fe.directoryOffset = os.GetFilePointer();
+                    os.WriteLong(0); // for now
+                    os.WriteString(fe.file);
+                    totalSize += directory.FileLength(fe.file);
+                }
+
+                // Pre-allocate size of file as optimization --
+                // this can potentially help IO performance as
+                // we write the file and also later during
+                // searching.  It also uncovers a disk-full
+                // situation earlier and hopefully without
+                // actually filling disk to 100%:
+                long finalLength = totalSize + os.GetFilePointer();
+                os.SetLength(finalLength);
+
+                // Open the files and copy their data into the stream.
+                // Remember the locations of each file's data section.
+                byte[] buffer = new byte[16384];
+                foreach (FileEntry fe in entries)
+                {
+                    fe.dataOffset = os.GetFilePointer();
+                    CopyFile(fe, os, buffer);
+                }
+
+                // Write the data offsets into the directory of the compound stream
+                foreach (FileEntry fe in entries)
+                {
+                    os.Seek(fe.directoryOffset);
+                    os.WriteLong(fe.dataOffset);
+                }
+
+                System.Diagnostics.Debug.Assert(finalLength == os.Length());
+
+                // Close the output stream. Set the os to null before trying to
+                // close so that if an exception occurs during the close, the
+                // finally clause below will not attempt to close the stream
+                // the second time.
+                IndexOutput tmp = os;
+                os = null;
+                tmp.Close();
+            }
+            finally
+            {
+                if (os != null)
+                    try
+                    {
+                        os.Close();
+                    }
+                    catch (System.IO.IOException e)
+                    {
+                    }
+            }
+        }
+
 		
 		/// <summary>Copy the contents of the file with specified extension into the
 		/// provided output stream. Use the provided buffer for moving data

Modified: incubator/lucene.net/trunk/src/core/Index/ConcurrentMergeScheduler.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/ConcurrentMergeScheduler.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/ConcurrentMergeScheduler.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/ConcurrentMergeScheduler.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using System.Collections.Generic;
+using Lucene.Net.Support;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Index
@@ -36,11 +37,11 @@ namespace Lucene.Net.Index
 	{
 		
 		private int mergeThreadPriority = - 1;
-		
-		protected internal System.Collections.IList mergeThreads = new System.Collections.ArrayList();
+
+        protected internal IList<MergeThread> mergeThreads = new List<MergeThread>();
 		
 		// Max number of threads allowed to be merging at once
-		private int maxThreadCount = 1;
+		private int _maxThreadCount = 1;
 		
 		protected internal Directory dir;
 		
@@ -56,29 +57,45 @@ namespace Lucene.Net.Index
 				AddMyself();
 			}
 		}
-		
-		/// <summary>Sets the max # simultaneous threads that may be
-		/// running.  If a merge is necessary yet we already have
-		/// this many threads running, the incoming thread (that
-		/// is calling add/updateDocument) will block until
-		/// a merge thread has completed. 
-		/// </summary>
-		public virtual void  SetMaxThreadCount(int count)
-		{
-			if (count < 1)
-				throw new System.ArgumentException("count should be at least 1");
-			maxThreadCount = count;
-		}
-		
-		/// <summary>Get the max # simultaneous threads that may be</summary>
-		/// <seealso cref="SetMaxThreadCount">
-		/// </seealso>
-		public virtual int GetMaxThreadCount()
-		{
-			return maxThreadCount;
-		}
-		
-		/// <summary>Return the priority that merge threads run at.  By
+
+	    /// <summary>Gets or sets the max # simultaneous threads that may be
+	    /// running.  If a merge is necessary yet we already have
+	    /// this many threads running, the incoming thread (that
+	    /// is calling add/updateDocument) will block until
+	    /// a merge thread has completed. 
+	    /// </summary>
+	    public virtual int MaxThreadCount
+	    {
+	        set
+	        {
+	            if (value < 1)
+	                throw new System.ArgumentException("count should be at least 1");
+	            _maxThreadCount = value;
+	        }
+	        get { return _maxThreadCount; }
+        }
+        
+        /// <summary>Sets the max # simultaneous threads that may be
+        /// running.  If a merge is necessary yet we already have
+        /// this many threads running, the incoming thread (that
+        /// is calling add/updateDocument) will block until
+        /// a merge thread has completed. 
+        /// </summary>
+        [Obsolete("Use MaxThreadCount property instead.")]
+        public virtual void SetMaxThreadCount(int count)
+        {
+            MaxThreadCount = count;
+        }
+
+        /// <summary>Get the max # simultaneous threads that may be</summary>
+        /// <seealso cref="SetMaxThreadCount" />
+        [Obsolete("Use MaxThreadCount property instead.")]
+        public virtual int GetMaxThreadCount()
+        {
+            return MaxThreadCount;
+        }
+
+	    /// <summary>Return the priority that merge threads run at.  By
 		/// default the priority is 1 plus the priority of (ie,
 		/// slightly higher priority than) the first thread that
 		/// calls merge. 
@@ -92,7 +109,7 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		/// <summary>Return the priority that merge threads run at. </summary>
+		/// <summary>Set the priority that merge threads run at. </summary>
 		public virtual void  SetMergeThreadPriority(int pri)
 		{
 			lock (this)
@@ -104,7 +121,7 @@ namespace Lucene.Net.Index
 				int numThreads = MergeThreadCount();
 				for (int i = 0; i < numThreads; i++)
 				{
-					MergeThread merge = (MergeThread) mergeThreads[i];
+					MergeThread merge = mergeThreads[i];
 					merge.SetThreadPriority(pri);
 				}
 			}
@@ -129,17 +146,20 @@ namespace Lucene.Net.Index
 				{
 					// Default to slightly higher priority than our
 					// calling thread
-					mergeThreadPriority = 1 + (System.Int32) SupportClass.ThreadClass.Current().Priority;
+					mergeThreadPriority = 1 + (System.Int32) ThreadClass.Current().Priority;
 					if (mergeThreadPriority > (int) System.Threading.ThreadPriority.Highest)
 						mergeThreadPriority = (int) System.Threading.ThreadPriority.Highest;
 				}
 			}
 		}
 		
-		public override void  Close()
-		{
-			closed = true;
-		}
+        protected override void Dispose(bool disposing)
+        {
+            //if (disposing)
+            //{
+                closed = true;
+            //}
+        }
 		
 		public virtual void  Sync()
 		{
@@ -153,7 +173,7 @@ namespace Lucene.Net.Index
 					if (Verbose())
 					{
 						for (int i = 0; i < count; i++)
-							Message("    " + i + ": " + ((MergeThread) mergeThreads[i]));
+							Message("    " + i + ": " + mergeThreads[i]);
 					}
 					
 					try
@@ -162,10 +182,11 @@ namespace Lucene.Net.Index
 					}
 					catch (System.Threading.ThreadInterruptedException ie)
 					{
-						// In 3.0 we will change this to throw
-						// InterruptedException instead
-						SupportClass.ThreadClass.Current().Interrupt();
-						throw new System.SystemException(ie.Message, ie);
+                        //// In 3.0 we will change this to throw
+                        //// InterruptedException instead
+                        //SupportClass.ThreadClass.Current().Interrupt();
+                        //throw new System.SystemException(ie.Message, ie);
+					    throw;
 					}
 				}
 			}
@@ -175,26 +196,13 @@ namespace Lucene.Net.Index
 		{
             lock (this)
             {
-                return MergeThreadCount(false);
-            }
-		}
-
-        private int MergeThreadCount(bool excludeDone)
-        {
-            lock (this)
-            {
                 int count = 0;
                 int numThreads = mergeThreads.Count;
                 for (int i = 0; i < numThreads; i++)
                 {
-                    MergeThread t = (MergeThread)mergeThreads[i];
-                    if (t.IsAlive)
+                    if (mergeThreads[i].IsAlive)
                     {
-                        MergePolicy.OneMerge runningMerge = t.GetRunningMerge();
-                        if (!excludeDone || (runningMerge != null && !runningMerge.mergeDone))
-                        {
                             count++;
-                        }
                     }
                 }
                 return count;
@@ -203,8 +211,7 @@ namespace Lucene.Net.Index
 		
 		public override void  Merge(IndexWriter writer)
 		{
-			
-			// TODO: enable this once we are on JRE 1.5
+			// TODO: .NET doesn't support this
 			// assert !Thread.holdsLock(writer);
 			
 			this.writer = writer;
@@ -230,7 +237,6 @@ namespace Lucene.Net.Index
 			// pending merges, until it's empty:
 			while (true)
 			{
-				
 				// TODO: we could be careful about which merges to do in
 				// the BG (eg maybe the "biggest" ones) vs FG, which
 				// merges to do first (the easiest ones?), etc.
@@ -253,7 +259,7 @@ namespace Lucene.Net.Index
 					lock (this)
 					{
 						MergeThread merger;
-						while (MergeThreadCount(true) >= maxThreadCount)
+						while (MergeThreadCount() >= _maxThreadCount)
 						{
 							if (Verbose())
 								Message("    too many merge threads running; stalling...");
@@ -263,16 +269,18 @@ namespace Lucene.Net.Index
 							}
 							catch (System.Threading.ThreadInterruptedException ie)
 							{
-								// In 3.0 we will change this to throw
-								// InterruptedException instead
-								SupportClass.ThreadClass.Current().Interrupt();
-								throw new System.SystemException(ie.Message, ie);
+                                //// In 3.0 we will change this to throw
+                                //// InterruptedException instead
+                                //SupportClass.ThreadClass.Current().Interrupt();
+                                //throw new System.SystemException(ie.Message, ie);
+							    throw;
 							}
 						}
 						
 						if (Verbose())
 							Message("  consider merge " + merge.SegString(dir));
-						
+
+					    System.Diagnostics.Debug.Assert(MergeThreadCount() < _maxThreadCount);
 												
 						// OK to spawn a new merge thread to handle this
 						// merge:
@@ -314,7 +322,7 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		public /*protected internal*/ class MergeThread:SupportClass.ThreadClass
+		public /*protected internal*/ class MergeThread:ThreadClass
 		{
 			private void  InitBlock(ConcurrentMergeScheduler enclosingInstance)
 			{
@@ -462,9 +470,10 @@ namespace Lucene.Net.Index
 			}
 			catch (System.Threading.ThreadInterruptedException ie)
 			{
-				SupportClass.ThreadClass.Current().Interrupt();
-				// In 3.0 this will throw InterruptedException
-				throw new System.SystemException(ie.Message, ie);
+                //SupportClass.ThreadClass.Current().Interrupt();
+                //// In 3.0 this will throw InterruptedException
+                //throw new System.SystemException(ie.Message, ie);
+			    throw;
 			}
 			throw new MergePolicy.MergeException(exc, dir);
 		}
@@ -478,13 +487,13 @@ namespace Lucene.Net.Index
 			{
 				throw new System.SystemException("setTestMode() was not called; often this is because your test case's setUp method fails to call super.setUp in LuceneTestCase");
 			}
-			lock (allInstances.SyncRoot)
+			lock (allInstances)
 			{
 				int count = allInstances.Count;
 				// Make sure all outstanding threads are done so we see
 				// any exceptions they may produce:
 				for (int i = 0; i < count; i++)
-					((ConcurrentMergeScheduler) allInstances[i]).Sync();
+				    allInstances[i].Sync();
 				bool v = anyExceptions;
 				anyExceptions = false;
 				return v;
@@ -493,7 +502,7 @@ namespace Lucene.Net.Index
 		
 		public static void  ClearUnhandledExceptions()
 		{
-			lock (allInstances.SyncRoot)
+			lock (allInstances)
 			{
 				anyExceptions = false;
 			}
@@ -502,19 +511,19 @@ namespace Lucene.Net.Index
 		/// <summary>Used for testing </summary>
 		private void  AddMyself()
 		{
-			lock (allInstances.SyncRoot)
+			lock (allInstances)
 			{
 				int size = allInstances.Count;
 				int upto = 0;
 				for (int i = 0; i < size; i++)
 				{
-					ConcurrentMergeScheduler other = (ConcurrentMergeScheduler) allInstances[i];
+					ConcurrentMergeScheduler other = allInstances[i];
 					if (!(other.closed && 0 == other.MergeThreadCount()))
 					// Keep this one for now: it still has threads or
 					// may spawn new threads
 						allInstances[upto++] = other;
 				}
-				((System.Collections.IList) ((System.Collections.ArrayList) allInstances).GetRange(upto, allInstances.Count - upto)).Clear();
+			    allInstances.RemoveRange(upto, allInstances.Count - upto);
 				allInstances.Add(this);
 			}
 		}
@@ -534,10 +543,10 @@ namespace Lucene.Net.Index
 		}
 		
 		/// <summary>Used for testing </summary>
-		private static System.Collections.IList allInstances;
+		private static List<ConcurrentMergeScheduler> allInstances;
 		public static void  SetTestMode()
 		{
-			allInstances = new System.Collections.ArrayList();
+			allInstances = new List<ConcurrentMergeScheduler>();
 		}
 	}
 }
\ No newline at end of file

Modified: incubator/lucene.net/trunk/src/core/Index/DefaultSkipListReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/trunk/src/core/Index/DefaultSkipListReader.cs?rev=1294875&r1=1294874&r2=1294875&view=diff
==============================================================================
--- incubator/lucene.net/trunk/src/core/Index/DefaultSkipListReader.cs (original)
+++ incubator/lucene.net/trunk/src/core/Index/DefaultSkipListReader.cs Tue Feb 28 22:43:08 2012
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using IndexInput = Lucene.Net.Store.IndexInput;
 
 namespace Lucene.Net.Index
@@ -114,7 +114,7 @@ namespace Lucene.Net.Index
 				{
 					payloadLength[level] = skipStream.ReadVInt();
 				}
-				delta = SupportClass.Number.URShift(delta, 1);
+				delta = Number.URShift(delta, 1);
 			}
 			else
 			{



Mime
View raw message