lucenenet-commits mailing list archives

From ccurr...@apache.org
Subject [Lucene.Net] svn commit: r1201243 [3/8] - in /incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk: src/core/ src/core/Analysis/ src/core/Analysis/Standard/ src/core/Analysis/Tokenattributes/ src/core/Document/ src/core/Index/ src/core/QueryParser/ src/core/Search/...
Date Sat, 12 Nov 2011 10:44:27 GMT
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Token.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Token.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Token.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Token.cs Sat Nov 12 10:44:21 2011
@@ -16,7 +16,8 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
+using Lucene.Net.Util;
 using FlagsAttribute = Lucene.Net.Analysis.Tokenattributes.FlagsAttribute;
 using OffsetAttribute = Lucene.Net.Analysis.Tokenattributes.OffsetAttribute;
 using PayloadAttribute = Lucene.Net.Analysis.Tokenattributes.PayloadAttribute;
@@ -57,21 +58,8 @@ namespace Lucene.Net.Analysis
 	/// Even though it is no longer necessary to use Token with the new TokenStream API, it can
 	/// be used as a convenience class that implements all <see cref="Attribute" />s, which is especially useful
 	/// to easily switch from the old to the new TokenStream API.
-	/// </summary>
-	/// <summary><br/><br/>
-	/// <p/><b>NOTE:</b> As of 2.3, Token stores the term text
-	/// internally as a malleable char[] termBuffer instead of
-	/// String termText.  The indexing code and core tokenizers
-	/// have been changed to re-use a single Token instance, changing
-	/// its buffer and other fields in-place as the Token is
-	/// processed.  This provides substantially better indexing
-	/// performance as it saves the GC cost of new'ing a Token and
-	/// String for every term.  The APIs that accept String
-	/// termText are still available but a warning about the
-	/// associated performance cost has been added (below).  The
-	/// <see cref="TermText()" /> method has been deprecated.<p/>
-	/// </summary>
-	/// <summary><p/>Tokenizers and TokenFilters should try to re-use a Token instance when
+	/// <br/><br/>
+	/// <p/>Tokenizers and TokenFilters should try to re-use a Token instance when
 	/// possible for best performance, by implementing the
 	/// <see cref="TokenStream.IncrementToken()" /> API.
 	/// Failing that, to create a new Token you should first use
@@ -135,68 +123,17 @@ namespace Lucene.Net.Analysis
 	[Serializable]
 	public class Token:AttributeImpl, System.ICloneable, TermAttribute, TypeAttribute, PositionIncrementAttribute, FlagsAttribute, OffsetAttribute, PayloadAttribute
 	{
-		
 		public const System.String DEFAULT_TYPE = "word";
 		
 		private static int MIN_BUFFER_SIZE = 10;
-		
-		/// <deprecated> We will remove this when we remove the
-		/// deprecated APIs 
-		/// </deprecated>
-        [Obsolete("We will remove this when we remove the deprecated APIs")]
-		private System.String termText;
-		
-		/// <summary> Characters for the term text.</summary>
-		/// <deprecated> This will be made private. Instead, use:
-		/// <see cref="TermBuffer()" />, 
-		/// <see cref="SetTermBuffer(char[], int, int)" />,
-		/// <see cref="SetTermBuffer(String)" />, or
-		/// <see cref="SetTermBuffer(String, int, int)" />
-		/// </deprecated>
-        [Obsolete("This will be made private. Instead, use: TermBuffer(), SetTermBuffer(char[], int, int), SetTermBuffer(String) or SetTermBuffer(String, int, int)")]
-		internal char[] termBuffer;
-		
-		/// <summary> Length of term text in the buffer.</summary>
-		/// <deprecated> This will be made private. Instead, use:
-        /// <see cref="TermLength()" />, or <see cref="SetTermLength(int)"/>.
-		/// </deprecated>
-        [Obsolete("This will be made private. Instead, use: TermLength(), or setTermLength(int)")]
-		internal int termLength;
-		
-		/// <summary> Start in source text.</summary>
-		/// <deprecated> This will be made private. Instead, use:
-        /// <see cref="StartOffset()" />, or <see cref="SetStartOffset(int)"/>.
-		/// </deprecated>
-        [Obsolete("This will be made private. Instead, use: StartOffset(), or SetStartOffset(int).")]
-		internal int startOffset;
-		
-		/// <summary> End in source text.</summary>
-		/// <deprecated> This will be made private. Instead, use:
-        /// <see cref="EndOffset()" />, or <see cref="SetEndOffset(int)"/>.
-		/// </deprecated>
-        [Obsolete("This will be made private. Instead, use: EndOffset(), or SetEndOffset(int).")]
-		internal int endOffset;
-		
-		/// <summary> The lexical type of the token.</summary>
-		/// <deprecated> This will be made private. Instead, use:
-        /// <see cref="Type()" />, or <see cref="SetType(String)"/>.
-		/// </deprecated>
-        [Obsolete("This will be made private. Instead, use: Type(), or SetType(String).")]
-		internal System.String type = DEFAULT_TYPE;
-		
+
+        private char[] termBuffer;
+		private int termLength;
+		private int startOffset, endOffset;
+		private string type = DEFAULT_TYPE;
 		private int flags;
-		
-		/// <deprecated> This will be made private. Instead, use:
-        /// <see cref="GetPayload()" />, or <see cref="SetPayload(Payload)"/>.
-		/// </deprecated>
-        [Obsolete("This will be made private. Instead, use: GetPayload(), or SetPayload(Payload).")]
-		internal Payload payload;
-		
-		/// <deprecated> This will be made private. Instead, use:
-        /// <see cref="GetPositionIncrement()" />, or <see cref="SetPositionIncrement(int)"/>.
-		/// </deprecated>
-        [Obsolete("This will be made private. Instead, use: GetPositionIncrement(), or SetPositionIncrement(int).")]
-		internal int positionIncrement = 1;
+		private Payload payload;
+		private int positionIncrement = 1;
 		
 		/// <summary>Constructs a Token with null text. </summary>
 		public Token()
@@ -206,10 +143,8 @@ namespace Lucene.Net.Analysis
 		/// <summary>Constructs a Token with null text and start &amp; end
 		/// offsets.
 		/// </summary>
-		/// <param name="start">start offset in the source text
-		/// </param>
-		/// <param name="end">end offset in the source text 
-		/// </param>
+		/// <param name="start">start offset in the source text</param>
+		/// <param name="end">end offset in the source text</param>
 		public Token(int start, int end)
 		{
 			startOffset = start;
@@ -219,12 +154,9 @@ namespace Lucene.Net.Analysis
 		/// <summary>Constructs a Token with null text and start &amp; end
 		/// offsets plus the Token type.
 		/// </summary>
-		/// <param name="start">start offset in the source text
-		/// </param>
-		/// <param name="end">end offset in the source text
-		/// </param>
-		/// <param name="typ">the lexical type of this Token 
-		/// </param>
+		/// <param name="start">start offset in the source text</param>
+		/// <param name="end">end offset in the source text</param>
+		/// <param name="typ">the lexical type of this Token</param>
 		public Token(int start, int end, System.String typ)
 		{
 			startOffset = start;
@@ -235,12 +167,9 @@ namespace Lucene.Net.Analysis
 		/// <summary> Constructs a Token with null text and start &amp; end
 		/// offsets plus flags. NOTE: flags is EXPERIMENTAL.
 		/// </summary>
-		/// <param name="start">start offset in the source text
-		/// </param>
-		/// <param name="end">end offset in the source text
-		/// </param>
-		/// <param name="flags">The bits to set for this token
-		/// </param>
+		/// <param name="start">start offset in the source text</param>
+		/// <param name="end">end offset in the source text</param>
+		/// <param name="flags">The bits to set for this token</param>
 		public Token(int start, int end, int flags)
 		{
 			startOffset = start;
@@ -254,15 +183,12 @@ namespace Lucene.Net.Analysis
 		/// instead use the char[] termBuffer methods to set the
 		/// term text.
 		/// </summary>
-		/// <param name="text">term text
-		/// </param>
-		/// <param name="start">start offset
-		/// </param>
-		/// <param name="end">end offset
-		/// </param>
+		/// <param name="text">term text</param>
+		/// <param name="start">start offset</param>
+		/// <param name="end">end offset</param>
 		public Token(System.String text, int start, int end)
 		{
-			termText = text;
+		    SetTermBuffer(text);
 			startOffset = start;
 			endOffset = end;
 		}
@@ -272,17 +198,13 @@ namespace Lucene.Net.Analysis
 		/// speed you should instead use the char[] termBuffer
 		/// methods to set the term text.
 		/// </summary>
-		/// <param name="text">term text
-		/// </param>
-		/// <param name="start">start offset
-		/// </param>
-		/// <param name="end">end offset
-		/// </param>
-		/// <param name="typ">token type
-		/// </param>
+		/// <param name="text">term text</param>
+		/// <param name="start">start offset</param>
+		/// <param name="end">end offset</param>
+		/// <param name="typ">token type</param>
 		public Token(System.String text, int start, int end, System.String typ)
 		{
-			termText = text;
+		    SetTermBuffer(text);
 			startOffset = start;
 			endOffset = end;
 			type = typ;
@@ -293,17 +215,13 @@ namespace Lucene.Net.Analysis
 		/// speed you should instead use the char[] termBuffer
 		/// methods to set the term text.
 		/// </summary>
-		/// <param name="text">
-		/// </param>
-		/// <param name="start">
-		/// </param>
-		/// <param name="end">
-		/// </param>
-		/// <param name="flags">token type bits
-		/// </param>
+		/// <param name="text"></param>
+		/// <param name="start"></param>
+		/// <param name="end"></param>
+		/// <param name="flags">token type bits</param>
 		public Token(System.String text, int start, int end, int flags)
 		{
-			termText = text;
+		    SetTermBuffer(text);
 			startOffset = start;
 			endOffset = end;
 			this.flags = flags;
@@ -313,16 +231,11 @@ namespace Lucene.Net.Analysis
 		/// &amp; length), start and end
 		/// offsets
 		/// </summary>
-		/// <param name="startTermBuffer">
-		/// </param>
-		/// <param name="termBufferOffset">
-		/// </param>
-		/// <param name="termBufferLength">
-		/// </param>
-		/// <param name="start">
-		/// </param>
-		/// <param name="end">
-		/// </param>
+		/// <param name="startTermBuffer"></param>
+		/// <param name="termBufferOffset"></param>
+		/// <param name="termBufferLength"></param>
+		/// <param name="start"></param>
+		/// <param name="end"></param>
 		public Token(char[] startTermBuffer, int termBufferOffset, int termBufferLength, int start, int end)
 		{
 			SetTermBuffer(startTermBuffer, termBufferOffset, termBufferLength);
@@ -373,37 +286,6 @@ namespace Lucene.Net.Analysis
 			return positionIncrement;
 		}
 		
-		/// <summary>Sets the Token's term text.  <b>NOTE:</b> for better
-		/// indexing speed you should instead use the char[]
-		/// termBuffer methods to set the term text.
-		/// </summary>
-		/// <deprecated> use <see cref="SetTermBuffer(char[], int, int)" /> or
-		/// <see cref="SetTermBuffer(String)" /> or
-		/// <see cref="SetTermBuffer(String, int, int)" />.
-		/// </deprecated>
-        [Obsolete("Use SetTermBuffer(char[], int, int) or SetTermBuffer(String) or SetTermBuffer(String, int, int)")]
-		public virtual void  SetTermText(System.String text)
-		{
-			termText = text;
-			termBuffer = null;
-		}
-		
-		/// <summary>Returns the Token's term text.
-		/// 
-		/// </summary>
-		/// <deprecated> This method now has a performance penalty
-		/// because the text is stored internally in a char[].  If
-		/// possible, use <see cref="TermBuffer()" /> and <see cref="TermLength()"/>
-		/// directly instead.  If you really need a
-		/// String, use <see cref="Term()" />
-		/// </deprecated>
-		public System.String TermText()
-		{
-			if (termText == null && termBuffer != null)
-				termText = new System.String(termBuffer, 0, termLength);
-			return termText;
-		}
-		
 		/// <summary>Returns the Token's term text.
 		/// 
 		/// This method has a performance penalty
@@ -415,8 +297,6 @@ namespace Lucene.Net.Analysis
 		/// </summary>
 		public System.String Term()
 		{
-			if (termText != null)
-				return termText;
 			InitTermBuffer();
 			return new System.String(termBuffer, 0, termLength);
 		}
@@ -424,15 +304,11 @@ namespace Lucene.Net.Analysis
 		/// <summary>Copies the contents of buffer, starting at offset for
 		/// length characters, into the termBuffer array.
 		/// </summary>
-		/// <param name="buffer">the buffer to copy
-		/// </param>
-		/// <param name="offset">the index in the buffer of the first character to copy
-		/// </param>
-		/// <param name="length">the number of characters to copy
-		/// </param>
+		/// <param name="buffer">the buffer to copy</param>
+		/// <param name="offset">the index in the buffer of the first character to copy</param>
+		/// <param name="length">the number of characters to copy</param>
 		public void  SetTermBuffer(char[] buffer, int offset, int length)
 		{
-			termText = null;
 			GrowTermBuffer(length);
 			Array.Copy(buffer, offset, termBuffer, 0, length);
 			termLength = length;
@@ -443,10 +319,9 @@ namespace Lucene.Net.Analysis
 		/// </param>
 		public void  SetTermBuffer(System.String buffer)
 		{
-			termText = null;
 			int length = buffer.Length;
 			GrowTermBuffer(length);
-			SupportClass.TextSupport.GetCharsFromString(buffer, 0, length, termBuffer, 0);
+			TextSupport.GetCharsFromString(buffer, 0, length, termBuffer, 0);
 			termLength = length;
 		}
 		
@@ -463,9 +338,8 @@ namespace Lucene.Net.Analysis
 		{
 			System.Diagnostics.Debug.Assert(offset <= buffer.Length);
 			System.Diagnostics.Debug.Assert(offset + length <= buffer.Length);
-			termText = null;
 			GrowTermBuffer(length);
-			SupportClass.TextSupport.GetCharsFromString(buffer, offset, offset + length, termBuffer, 0);
+			TextSupport.GetCharsFromString(buffer, offset, offset + length, termBuffer, 0);
 			termLength = length;
 		}
 		
@@ -499,22 +373,7 @@ namespace Lucene.Net.Analysis
 		{
 			if (termBuffer == null)
 			{
-				// The buffer is always at least MIN_BUFFER_SIZE
-				newSize = newSize < MIN_BUFFER_SIZE?MIN_BUFFER_SIZE:newSize;
-				//Preserve termText 
-				if (termText != null)
-				{
-					int ttLen = termText.Length;
-					newSize = newSize < ttLen?ttLen:newSize;
-					termBuffer = new char[ArrayUtil.GetNextSize(newSize)];
-					SupportClass.TextSupport.GetCharsFromString(termText, 0, termText.Length, termBuffer, 0);
-					termText = null;
-				}
-				else
-				{
-					// no term Text, the first allocation
-					termBuffer = new char[ArrayUtil.GetNextSize(newSize)];
-				}
+                termBuffer = new char[ArrayUtil.GetNextSize(newSize < MIN_BUFFER_SIZE ? MIN_BUFFER_SIZE : newSize)];
 			}
 			else
 			{
@@ -553,33 +412,12 @@ namespace Lucene.Net.Analysis
 			}
 		}
 		
-		
-		// TODO: once we remove the deprecated termText() method
-		// and switch entirely to char[] termBuffer we don't need
-		// to use this method anymore, only for late init of the buffer
 		private void  InitTermBuffer()
 		{
 			if (termBuffer == null)
 			{
-				if (termText == null)
-				{
-					termBuffer = new char[ArrayUtil.GetNextSize(MIN_BUFFER_SIZE)];
-					termLength = 0;
-				}
-				else
-				{
-					int length = termText.Length;
-					if (length < MIN_BUFFER_SIZE)
-						length = MIN_BUFFER_SIZE;
-					termBuffer = new char[ArrayUtil.GetNextSize(length)];
-					termLength = termText.Length;
-					SupportClass.TextSupport.GetCharsFromString(termText, 0, termText.Length, termBuffer, 0);
-					termText = null;
-				}
-			}
-			else
-			{
-				termText = null;
+                termBuffer = new char[ArrayUtil.GetNextSize(MIN_BUFFER_SIZE)];
+                termLength = 0;
 			}
 		}
 		
@@ -611,7 +449,7 @@ namespace Lucene.Net.Analysis
 		/// <summary>Returns this Token's starting offset, the position of the first character
 		/// corresponding to this token in the source text.
 		/// Note that the difference between endOffset() and startOffset() may not be
-		/// equal to termText.length(), as the term text may have been altered by a
+		/// equal to <see cref="TermLength"/>, as the term text may have been altered by a
 		/// stemmer or some other filter. 
 		/// </summary>
 		public int StartOffset()
@@ -727,7 +565,6 @@ namespace Lucene.Net.Analysis
 			payload = null;
 			// Leave termBuffer to allow re-use
 			termLength = 0;
-			termText = null;
 			positionIncrement = 1;
 			flags = 0;
 			startOffset = endOffset = 0;
@@ -997,11 +834,6 @@ namespace Lucene.Net.Analysis
 				{
 					to.payload = (Payload) payload.Clone();
 				}
-				// remove the following optimization in 3.0 when old TokenStream API removed:
-			}
-			else if (target is TokenWrapper)
-			{
-				((TokenWrapper) target).delegate_Renamed = (Token) this.Clone();
 			}
 			else
 			{
@@ -1014,5 +846,57 @@ namespace Lucene.Net.Analysis
 				((TypeAttribute) target).SetType(type);
 			}
 		}
-	}
+       
+        ///<summary>
+        /// Convenience factory that returns <code>Token</code> as implementation for the basic
+        /// attributes and return the default impl (with &quot;Impl&quot; appended) for all other
+        /// attributes.
+        /// @since 3.0
+        /// </summary>
+	    public static AttributeSource.AttributeFactory TOKEN_ATTRIBUTE_FACTORY =
+	        new TokenAttributeFactory(AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY);
+  
+        /// <summary>
+        /// <b>Expert</b>: Creates an AttributeFactory returning <see cref="Token" /> as instance for the basic attributes
+        /// and for all other attributes calls the given delegate factory.
+        /// </summary>
+        public class TokenAttributeFactory : AttributeSource.AttributeFactory
+        {
+
+            private AttributeSource.AttributeFactory _delegateFactory;
+
+            /// <summary>
+            /// <b>Expert</b>: Creates an AttributeFactory returning <see cref="Token" /> as instance for the basic attributes
+            /// and for all other attributes calls the given delegate factory.
+            /// </summary>
+            public TokenAttributeFactory(AttributeSource.AttributeFactory delegateFactory)
+            {
+                this._delegateFactory = delegateFactory;
+            }
+
+            public override AttributeImpl CreateAttributeInstance<T>()
+            {
+                return typeof(T).IsAssignableFrom(typeof(Token))
+                           ? new Token()
+                           : _delegateFactory.CreateAttributeInstance<T>();
+            }
+
+            public override bool Equals(Object other)
+            {
+                if (this == other) return true;
+
+                if (other is TokenAttributeFactory)
+                {
+                    TokenAttributeFactory af = (TokenAttributeFactory)other;
+                    return this._delegateFactory.Equals(af._delegateFactory);
+                }
+                return false;
+            }
+
+            public override int GetHashCode()
+            {
+                return _delegateFactory.GetHashCode() ^ 0x0a45aa31;
+            }
+        }
+    }
 }
\ No newline at end of file
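
For reference, a minimal sketch of the single-instance re-use pattern the updated doc comment describes, written against the surface visible in this diff. It assumes the Java 3.0 shape of AttributeSource (a generic AddAttribute<T>(), a TokenStream constructor taking an AttributeFactory, and OffsetAttribute.SetOffset(int, int)); OneWordStream itself is hypothetical.

    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    public sealed class OneWordStream : TokenStream
    {
        private readonly TermAttribute termAtt;
        private readonly OffsetAttribute offsetAtt;
        private bool done;

        public OneWordStream()
            : base(Token.TOKEN_ATTRIBUTE_FACTORY) // one Token instance backs all basic attributes
        {
            termAtt = AddAttribute<TermAttribute>();
            offsetAtt = AddAttribute<OffsetAttribute>();
        }

        public override bool IncrementToken()
        {
            if (done) return false;
            ClearAttributes();                 // reset shared state before writing
            termAtt.SetTermBuffer("example");  // written in place; no new Token per term
            offsetAtt.SetOffset(0, 7);
            done = true;
            return true;
        }
    }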

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/TokenStream.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/TokenStream.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/TokenStream.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/TokenStream.cs Sat Nov 12 10:44:21 2011
@@ -151,6 +151,12 @@ namespace Lucene.Net.Analysis
 		/// <summary>Releases resources associated with this stream. </summary>
 		public virtual void  Close()
 		{
+            Dispose();
 		}
+
+        public virtual void Dispose()
+        {
+            
+        }
 	}
 }
\ No newline at end of file
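
Since Close() now forwards to the new virtual Dispose(), a stream that owns resources only needs to override Dispose(); older callers that still invoke Close() get the same cleanup. A minimal sketch, with ReaderBackedStream and its reader field hypothetical:

    using Lucene.Net.Analysis;

    public class ReaderBackedStream : TokenStream
    {
        private System.IO.TextReader reader = new System.IO.StringReader("example");

        public override bool IncrementToken()
        {
            return false; // tokenization elided for brevity
        }

        public override void Dispose()
        {
            if (reader != null)
            {
                reader.Close(); // release the underlying reader once
                reader = null;
            }
            base.Dispose();
        }
    }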

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenattributes/TermAttributeImpl.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenattributes/TermAttributeImpl.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenattributes/TermAttributeImpl.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenattributes/TermAttributeImpl.cs Sat Nov 12 10:44:21 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using ArrayUtil = Lucene.Net.Util.ArrayUtil;
 using AttributeImpl = Lucene.Net.Util.AttributeImpl;
 
@@ -70,7 +70,7 @@ namespace Lucene.Net.Analysis.Tokenattri
 		{
 			int length = buffer.Length;
 			GrowTermBuffer(length);
-			SupportClass.TextSupport.GetCharsFromString(buffer, 0, length, termBuffer, 0);
+			TextSupport.GetCharsFromString(buffer, 0, length, termBuffer, 0);
 			termLength = length;
 		}
 		
@@ -88,7 +88,7 @@ namespace Lucene.Net.Analysis.Tokenattri
 			System.Diagnostics.Debug.Assert(offset <= buffer.Length);
 			System.Diagnostics.Debug.Assert(offset + length <= buffer.Length);
 			GrowTermBuffer(length);
-			SupportClass.TextSupport.GetCharsFromString(buffer, offset, offset + length, termBuffer, 0);
+			TextSupport.GetCharsFromString(buffer, offset, offset + length, termBuffer, 0);
 			termLength = length;
 		}
 		

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenizer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenizer.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenizer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/Tokenizer.cs Sat Nov 12 10:44:21 2011
@@ -26,10 +26,8 @@ namespace Lucene.Net.Analysis
 	/// <p/>
 	/// This is an abstract class; subclasses must override <see cref="TokenStream.IncrementToken()" />
 	/// <p/>
-    /// NOTE: Subclasses overriding <see cref="TokenStream.Next(Token)" /> must call
+    /// NOTE: Subclasses overriding <see cref="TokenStream.IncrementToken()" /> must call
 	/// <see cref="AttributeSource.ClearAttributes()" /> before setting attributes.
-    /// Subclasses overriding <see cref="TokenStream.IncrementToken()" /> must call
-	/// <see cref="Token.Clear()" /> before setting Token attributes.
 	/// </summary>
 	
 	public abstract class Tokenizer:TokenStream

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/WordlistLoader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/WordlistLoader.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/WordlistLoader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Analysis/WordlistLoader.cs Sat Nov 12 10:44:21 2011
@@ -16,16 +16,12 @@
  */
 
 using System;
+using System.Collections.Generic;
 
 namespace Lucene.Net.Analysis
 {
 	
-	/// <summary> Loader for text files that represent a list of stopwords.
-	/// 
-	/// 
-	/// </summary>
-	/// <version>  $Id: WordlistLoader.java 706342 2008-10-20 17:19:29Z gsingers $
-	/// </version>
+	/// <summary> Loader for text files that represent a list of stopwords.</summary>
 	public class WordlistLoader
 	{
 		
@@ -33,15 +29,12 @@ namespace Lucene.Net.Analysis
 		/// leading and trailing whitespace). Every line of the file should contain only
 		/// one word. The words need to be in lowercase if you make use of an
 		/// Analyzer which uses LowerCaseFilter (like StandardAnalyzer).
-		/// 
 		/// </summary>
-		/// <param name="wordfile">File containing the wordlist
-		/// </param>
-		/// <returns> A HashSet with the file's words
-		/// </returns>
-		public static System.Collections.Hashtable GetWordSet(System.IO.FileInfo wordfile)
+		/// <param name="wordfile">File containing the wordlist</param>
+		/// <returns> A HashSet with the file's words</returns>
+		public static ISet<string> GetWordSet(System.IO.FileInfo wordfile)
 		{
-			System.Collections.Hashtable result = new System.Collections.Hashtable();
+			ISet<string> result = new HashSet<string>();
 			System.IO.StreamReader reader = null;
 			try
 			{
@@ -60,17 +53,13 @@ namespace Lucene.Net.Analysis
 		/// leading and trailing whitespace). Every line of the file should contain only
 		/// one word. The words need to be in lowercase if you make use of an
 		/// Analyzer which uses LowerCaseFilter (like StandardAnalyzer).
-		/// 
 		/// </summary>
-		/// <param name="wordfile">File containing the wordlist
-		/// </param>
-		/// <param name="comment">The comment string to ignore
-		/// </param>
-		/// <returns> A HashSet with the file's words
-		/// </returns>
-		public static System.Collections.Hashtable GetWordSet(System.IO.FileInfo wordfile, System.String comment)
+		/// <param name="wordfile">File containing the wordlist</param>
+		/// <param name="comment">The comment string to ignore</param>
+		/// <returns> A HashSet with the file's words</returns>
+		public static HashSet<string> GetWordSet(System.IO.FileInfo wordfile, System.String comment)
 		{
-			System.Collections.Hashtable result = new System.Collections.Hashtable();
+			HashSet<string> result = new HashSet<string>();
 			System.IO.StreamReader reader = null;
 			try
 			{
@@ -90,22 +79,19 @@ namespace Lucene.Net.Analysis
 		/// leading and trailing whitespace). Every line of the Reader should contain only
 		/// one word. The words need to be in lowercase if you make use of an
 		/// Analyzer which uses LowerCaseFilter (like StandardAnalyzer).
-		/// 
 		/// </summary>
-		/// <param name="reader">Reader containing the wordlist
-		/// </param>
-		/// <returns> A HashSet with the reader's words
-		/// </returns>
-		public static System.Collections.Hashtable GetWordSet(System.IO.TextReader reader)
+		/// <param name="reader">Reader containing the wordlist</param>
+		/// <returns>A HashSet with the reader's words</returns>
+		public static HashSet<string> GetWordSet(System.IO.TextReader reader)
 		{
-			System.Collections.Hashtable result = new System.Collections.Hashtable();
+			HashSet<string> result = new HashSet<string>();
 			System.IO.TextReader br = null;
 			try
 			{
 				System.String word = null;
 				while ((word = reader.ReadLine()) != null)
 				{
-					SupportClass.CollectionsHelper.Add(result, word.Trim());
+				    result.Add(word.Trim());
 				}
 			}
 			finally
@@ -128,9 +114,9 @@ namespace Lucene.Net.Analysis
 		/// </param>
 		/// <returns> A HashSet with the reader's words
 		/// </returns>
-        public static System.Collections.Hashtable GetWordSet(System.IO.TextReader reader, System.String comment)
+        public static HashSet<string> GetWordSet(System.IO.TextReader reader, System.String comment)
 		{
-			System.Collections.Hashtable result = new System.Collections.Hashtable();
+            HashSet<string> result = new HashSet<string>();
 			System.IO.StreamReader br = null;
 			try
 			{
@@ -139,7 +125,7 @@ namespace Lucene.Net.Analysis
 				{
 					if (word.StartsWith(comment) == false)
 					{
-						SupportClass.CollectionsHelper.Add(result, word.Trim());
+					    result.Add(word.Trim());
 					}
 				}
 			}
@@ -161,11 +147,11 @@ namespace Lucene.Net.Analysis
 		/// <returns> stem dictionary that overrules the stemming algorithm
 		/// </returns>
 		/// <throws>  IOException  </throws>
-		public static System.Collections.Hashtable GetStemDict(System.IO.FileInfo wordstemfile)
+		public static Dictionary<string, string> GetStemDict(System.IO.FileInfo wordstemfile)
 		{
 			if (wordstemfile == null)
 				throw new System.NullReferenceException("wordstemfile may not be null");
-			System.Collections.Hashtable result = new System.Collections.Hashtable();
+            Dictionary<string, string> result = new Dictionary<string, string>();
 			System.IO.StreamReader br = null;
 			System.IO.StreamReader fr = null;
 			try
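
A short usage sketch of the new generic signatures above; the stopwords.txt path is hypothetical:

    using System.Collections.Generic;
    using Lucene.Net.Analysis;

    class WordlistLoaderExample
    {
        static void Main()
        {
            // One word per line; the comment overload skips lines starting with "#".
            ISet<string> words = WordlistLoader.GetWordSet(
                new System.IO.FileInfo("stopwords.txt"));
            HashSet<string> uncommented = WordlistLoader.GetWordSet(
                new System.IO.FileInfo("stopwords.txt"), "#");
            System.Console.WriteLine(words.Count + " / " + uncommented.Count);
        }
    }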

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/ChangeNotes.txt
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/ChangeNotes.txt?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/ChangeNotes.txt (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/ChangeNotes.txt Sat Nov 12 10:44:21 2011
@@ -5,34 +5,47 @@ would work properly even if it did.  The
 being done to try and emulate java's classes as much as possible
 and because of that, there may be unexpected behavior until
 tests have been written and prove that they work as intended.
-##############################################################
 
+There are some design choices in attempts to emulate Java
+libraries that may change.
+##############################################################
 
-TODO: Tests need to be written for SupportClass.HashMap
-TODO: Tests need to be written for SupportClass.WeakHashMap -> the entire implementation is probably all wrong
+Many of the changes made to collections may be unnecessary.  HashMap might
+be unnecessary if Lucene doesn't use null keys at any point in program execution,
+in which case a normal Dictionary may be used.  Other unnecessary additions might
+be LinkedList over List in some cases, but these have to be checked on a
+per-case basis for behavior and performance...
+
+TODO: AttributeSource Attribute Factory uses generic types differently than java, needs to be documented properly
+
+Use of ISet<T> instead of HashSet<T> may also be unnecessary, given how few set types .NET has compared to Java
+
+TODO: Tests need to be written for HashMap -> verify behavior
+TODO: Tests need to be written for WeakDictionary
+TODO: Comments need to be written for WeakDictionary
+TODO: Tests need to be written for IdentityDictionary -> Verify behavior
 
 Util.Version changed to Enum with extension methods
 ThreadInterruptedException.java was not ported; it only exists in Java because the built-in is a checked exception
-RamUsageEstimator unsure how to port.  Moves seen Dict to a Dict<object,object> (seemingly pointless), but java sets the Map<object,object> as a new IdentityHashMap()
+
+TODO: ParallelMultiSearcher - cannot be ported identically to Java.  Check differences.  A main one is that there are no named threads.
+
 PriorityQueue: in InsertWithOverflow, Java returns null; I set it to return default(T).  I don't think it's an issue; it looks like Java is doing unchecked generics.  We should look back and determine whether we should constrain the generic to class
 Parameter.cs - Commented code in Lucene.Net is uncommented in java
 TODO: Finish HashMap<K,V> and confirm it properly emulates java's behavior
 TODO: NamedThreadFactory.java
 TODO: DummyConcurrentLock.java
 TODO: Constants.java - LUCENE_MAIN_VERSION, and static constructor
-TODO: AverageGuessMemoryModel.java - IdentityHashMap okay to change to Dictionary<k,v>?
-TODO: AttributeSource.java needs WeakHashMap<k,v> ported from java
 TODO: Implement Cache.cs' IDisposable properly
 TODO: Check on FSDirectory(System.IO.DirectoryInfo path, LockFactory lockFactory)
-TODO: Revisit rewriting in ThreadInterruptedException
 TODO: LockStressTest.java
 TODO: MMapDirectory.java -> ThreadInterruptedException
 TODO: NIOFSDirectory.java
 TODO: RAMDirectory.java
+
 Util, Store (All files touched - Any that are present in 3.0.3 still need to be added)
 TODO: BooleanClause.java - Can't override ToString on Enum.  Leave type-safe or override with extension method?
 TODO: FieldValueHitQueue.Entry had to be made public for accessibility.  Look further into that.
-TODO: ParallelMultiSearcher - uses a lot of the java concurrency libs, which don't have perfect .NET equivalents
 TODO: FuzzyQuery - uses java.util.PriorityQueue<T>, which .net does not have.  Revisit later.
 TODO: ParallelReader - extra data types, TreeMap<Tk,Tv>
 
@@ -44,108 +57,4 @@ TODO: ConcurrentMergeScheduler - ThreadI
 
 CHANGED: FieldCacheRangeFilter - Expects nullable primitives for the anonymous range filters<T> -> replaced with Nulable<T>
 CONFIRM: IndexWriter.java - ReaderPool.Close/Commit/Get lock/sync changes - may be wrong - ThreadInterruptedException(ie)
-
-CachingWrapperFilter.java -> changes ported
-TermInfosReader.java -> changes ported
-Analyzer.java -> changes ported
-FieldsReader.java -> changes ported
-SegmentReader.java -> changes ported
-TopDocsCollector.java -> changes ported
-FieldDocSortedHitQueue.java -> changes ported
-PhraseQueue.java -> changes ported
-HitQueue.java -> changes ported
-FieldValueHitQueue.java -> changes ported
-MultipleTermsPositions.java -> changes ported
-TopFieldCollector.java -> changes ported
-SpanOrQuery.java -> changes ported
-NearSpansUnordered.java -> changes ported
-TopScoreDocCollector.java -> changes ported
-CompoundFileReader.java -> changes ported
-AbstractField.java -> changes ported
-Fieldable.java -> changes ported
-Field.java -> changes ported
-NumericField.java -> changes ported
-IndexReader.java -> changes ported
-DirectoryReader.java -> changes ported
-FilterIndexReader.java -> changes ported
-MultiReader.java -> changes ported
-CachingSpanFilter.java -> changes ported
-ConjunctionScorer.java -> changes ported
-Query.Java -> changes ported
-ConstantScoreQuery.java -> changes ported
-DisjunctionMaxScorer.java -> changes ported
-DisjunctionSumScorer.java -> changes ported
-FieldCacheTermsFilter.java -> changes ported
-FilteredDocIdSetIterator.java -> changes ported
-FilteredQuery.java -> changes ported
-CustomScoreQuery.java -> changes ported
-ValueSourceQuery.java -> changes ported
-MatchAllDocsQuery.java -> changes ported
-MultiPhraseQuery.java -> changes ported
-TermScorer.java -> changes ported
-SpanScorer.java -> changes ported
-ScoreCachingWrappingScorer.java -> changes ported
-PhraseQuery.java -> changes ported
-PhraseScorer.java -> changes ported
-ReqExclScorer.java -> changes ported
-ReqOptSumScorer.java -> changes ported
-BooleanQuery.java -> changes ported
-DisjunctionMaxQuery.java -> changes ported
-SpanFirstQuery.java -> changes ported
-SpanQuery.java -> changes ported
-SpanNearQuery.java -> changes ported
-SpanNotQuery.java -> changes ported
-SpanTermQuery.java -> changes ported
-FieldMaskingSpanQuery.java -> changes ported
-TermQuery.java -> changes ported
-WildCardQuery.java -> changes ported
-FieldCacheRangeFilter.java -> changes ported
-Document.java -> changes ported
-FieldsWriter.java -> changes ported
-FieldInfos.java -> changes ported
-DocFieldProcessorPerThread.java -> changes ported
-PayloadSpanUtil.java -> changes ported
-IndexSearcher.java -> changes ported
-SegmentInfo.java -> changes ported
-IndexWriter.java -> changes ported
-QueryParser.java -> changes ported
-MultiFieldQueryParser.java -> changes ported
-MergePolicy.java -> changes ported
-LogByteSizeMergePolicy.java -> changes ported
-LogMergePolicy.java -> changes ported
-LogDocMergePolicy.java -> changes ported
-BooleanScorer2.java -> changes ported
-CheckIndex.java -> changes ported
-ConcurrentMergeScheduler.java -> changes ported
-ReadOnlyDirectoryReader.java -> changes ported
-IndexFileDeleter.java -> changes ported
-DocumentsWriter.java -> changes ported
-BufferedDeletes.java -> changes ported
-ByteBlockPool.java -> changes ported
-DocInverterPerField.java -> changes ported
-DocInverterPerThread.java -> changes ported
-SegmentWriteState.java -> changes ported
-DocFieldProcessor.java -> changes ported
-DocFieldConsumer.java -> changes ported
-DocFieldConsumers.java -> changes ported
-DocInverter.java -> changes ported
-SingleTermEnum.java -> changes ported
-FilteredTermEnum.java -> changes ported
-NormsWriter.java -> changes ported
-InvertedDocEndConsumer.java -> changes ported
-DocumentsWriterThreadState.java -> changes ported
-NativeFSLockFactory.java -> changes ported
-ExactPhraseScorer.java -> changes ported
-SloppyPhraseScorer.java -> changes ported
-Explanation.java -> changes ported
-Similarity.java -> changes ported
-SimilarityDelegator.java -> changes ported
-SegmentInfos.java -> changes ported (doesn't inherit from a generic equiv as in Java)
-FlagsAttributeImpl.java -> changes ported
-OffsetAttributeImpl.java -> changes ported
-PayloadAttributeImpl.java -> changes ported
-PositionIncrementAttributeImpl.java -> changes ported
-TermAttributeImpl.java -> changes ported
-TypeAttributeImpl.java -> changes ported
-ASCIIFoldingFilter.java -> changes ported
-TokenStream.java -> changes ported
\ No newline at end of file
+CONFIRM: Java LinkedList behavior compared to C#.  Used extensively in Attributes, filters and the like
\ No newline at end of file
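
On the null-key question above, the behavior gap is small but real: .NET's Dictionary rejects null keys outright, so a plain Dictionary only suffices if Lucene never stores one. A quick check:

    using System;
    using System.Collections.Generic;

    class NullKeyCheck
    {
        static void Main()
        {
            var dict = new Dictionary<string, int>();
            try
            {
                dict[null] = 1; // Dictionary throws here; Java's HashMap would accept it
            }
            catch (ArgumentNullException)
            {
                Console.WriteLine("Dictionary<K,V> does not accept null keys");
            }
        }
    }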

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/CompressionTools.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/CompressionTools.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/CompressionTools.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/CompressionTools.cs Sat Nov 12 10:44:21 2011
@@ -22,7 +22,7 @@
 // http://www.icsharpcode.net/OpenSource/SharpZipLib/
 
 using System;
-
+using Lucene.Net.Support;
 using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
 
 namespace Lucene.Net.Documents
@@ -56,7 +56,7 @@ namespace Lucene.Net.Documents
 			* the uncompressed data. */
 			System.IO.MemoryStream bos = new System.IO.MemoryStream(length);
 
-            SupportClass.SharpZipLib.Deflater compressor = SupportClass.SharpZipLib.CreateDeflater();
+            Deflater compressor = SharpZipLib.CreateDeflater();
 			
 			try
 			{
@@ -82,19 +82,19 @@ namespace Lucene.Net.Documents
 		/// <summary>Compresses the specified byte range, with default BEST_COMPRESSION level </summary>
 		public static byte[] Compress(byte[] value_Renamed, int offset, int length)
         {
-			return Compress(value_Renamed, offset, length, SupportClass.SharpZipLib.Deflater.BEST_COMPRESSION);
+			return Compress(value_Renamed, offset, length, Deflater.BEST_COMPRESSION);
 		}
 		
 		/// <summary>Compresses all bytes in the array, with default BEST_COMPRESSION level </summary>
 		public static byte[] Compress(byte[] value_Renamed)
 		{
-            return Compress(value_Renamed, 0, value_Renamed.Length, SupportClass.SharpZipLib.Deflater.BEST_COMPRESSION);
+            return Compress(value_Renamed, 0, value_Renamed.Length, Deflater.BEST_COMPRESSION);
 		}
 		
 		/// <summary>Compresses the String value, with default BEST_COMPRESSION level </summary>
 		public static byte[] CompressString(System.String value_Renamed)
 		{
-            return CompressString(value_Renamed, SupportClass.SharpZipLib.Deflater.BEST_COMPRESSION);
+            return CompressString(value_Renamed, Deflater.BEST_COMPRESSION);
 		}
 		
 		/// <summary>Compresses the String value using the specified
@@ -116,7 +116,7 @@ namespace Lucene.Net.Documents
 			// Create an expandable byte array to hold the decompressed data
 			System.IO.MemoryStream bos = new System.IO.MemoryStream(value_Renamed.Length);
 			
-			SupportClass.SharpZipLib.Inflater decompressor = SupportClass.SharpZipLib.CreateInflater();
+			Inflater decompressor = SharpZipLib.CreateInflater();
 			
 			try
 			{
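
A round-trip sketch of the methods touched above; DecompressString is assumed to mirror CompressString, as it does in the Java original:

    using Lucene.Net.Documents;

    class CompressionToolsExample
    {
        static void Main()
        {
            byte[] packed = CompressionTools.CompressString("stored field text");
            // Assumed counterpart of CompressString (present in the Java original).
            string unpacked = CompressionTools.DecompressString(packed);
            System.Console.WriteLine(unpacked);
        }
    }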

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/DateField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/DateField.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/DateField.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Document/DateField.cs Sat Nov 12 10:44:21 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using NumericUtils = Lucene.Net.Util.NumericUtils;
 using NumericRangeQuery = Lucene.Net.Search.NumericRangeQuery;
 using PrefixQuery = Lucene.Net.Search.PrefixQuery;
@@ -68,7 +68,7 @@ namespace Lucene.Net.Documents
 		}
 		
 		// make date strings long enough to last a millennium
-        private static int DATE_LEN = SupportClass.Number.ToString(1000L * 365 * 24 * 60 * 60 * 1000, SupportClass.Number.MAX_RADIX).Length;
+        private static int DATE_LEN = Number.ToString(1000L * 365 * 24 * 60 * 60 * 1000, Number.MAX_RADIX).Length;
 
 		public static System.String MIN_DATE_STRING()
 		{
@@ -78,7 +78,7 @@ namespace Lucene.Net.Documents
 		public static System.String MAX_DATE_STRING()
 		{
 			char[] buffer = new char[DATE_LEN];
-            char c = SupportClass.Character.ForDigit(SupportClass.Character.MAX_RADIX - 1, SupportClass.Character.MAX_RADIX);
+            char c = Character.ForDigit(Character.MAX_RADIX - 1, Character.MAX_RADIX);
 			for (int i = 0; i < DATE_LEN; i++)
 				buffer[i] = c;
 			return new System.String(buffer);
@@ -103,7 +103,7 @@ namespace Lucene.Net.Documents
 			if (time < 0)
 				throw new System.SystemException("time '" + time + "' is too early, must be >= 0");
 
-            System.String s = SupportClass.Number.ToString(time, SupportClass.Character.MAX_RADIX);
+            System.String s = Number.ToString(time, Character.MAX_RADIX);
 			
 			if (s.Length > DATE_LEN)
 				throw new System.SystemException("time '" + time + "' is too late, length of string " + "representation must be <= " + DATE_LEN);
@@ -123,7 +123,7 @@ namespace Lucene.Net.Documents
 		/// <summary>Converts a string-encoded date into a millisecond time. </summary>
 		public static long StringToTime(System.String s)
 		{
-            return SupportClass.Number.Parse(s, SupportClass.Number.MAX_RADIX);
+            return Number.Parse(s, Number.MAX_RADIX);
 		}
 		/// <summary>Converts a string-encoded date into a Date object. </summary>
         public static System.DateTime StringToDate(System.String s)

Added: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/FileDiffs.txt
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/FileDiffs.txt?rev=1201243&view=auto
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/FileDiffs.txt (added)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/FileDiffs.txt Sat Nov 12 10:44:21 2011
@@ -0,0 +1,460 @@
+analysis\standard\
+analysis\standard\package.html - IDENTICAL
+analysis\standard\READ_BEFORE_REGENERATING.txt - New File
+analysis\standard\StandardAnalyzer.java - PORTED
+analysis\standard\StandardFilter.java - PORTED
+analysis\standard\StandardTokenizer.java - PORTED
+analysis\standard\StandardTokenizerImpl.java - PORTED
+analysis\standard\StandardTokenizerImpl.jflex - PORTED
+
+analysis\tokenattributes\
+analysis\tokenattributes\FlagsAttribute.java - IDENTICAL
+analysis\tokenattributes\FlagsAttributeImpl.java - PORTED
+analysis\tokenattributes\OffsetAttribute.java - IDENTICAL
+analysis\tokenattributes\OffsetAttributeImpl.java - PORTED
+analysis\tokenattributes\PayloadAttribute.java - IDENTICAL
+analysis\tokenattributes\PayloadAttributeImpl.java - PORTED
+analysis\tokenattributes\PositionIncrementAttribute.java - IDENTICAL
+analysis\tokenattributes\PositionIncrementAttributeImpl.java - PORTED
+analysis\tokenattributes\TermAttribute.java - IDENTICAL
+analysis\tokenattributes\TermAttributeImpl.java - PORTED
+analysis\tokenattributes\TypeAttribute.java - IDENTICAL
+analysis\tokenattributes\TypeAttributeImpl.java - PORTED
+
+analysis\
+analysis\Analyzer.java - PORTED
+analysis\ASCIIFoldingFilter.java - PORTED
+analysis\BaseCharFilter.java - PORTED
+analysis\CachingTokenFilter.java - PORTED
+analysis\CharacterCache.java - Removed in 3.x
+analysis\CharArraySet.java - PORTED
+analysis\CharFilter.java - PORTED
+analysis\CharReader.java - PORTED
+analysis\CharStream.java - IDENTICAL
+analysis\CharTokenizer.java - PORTED
+analysis\ISOLatin1AccentFilter.java - PORTED
+analysis\KeywordAnalyzer.java - PORTED
+analysis\KeywordTokenizer.java - PORTED
+analysis\LengthFilter.java - PORTED
+analysis\LetterTokenizer.java - PORTED
+analysis\LowerCaseFilter.java - PORTED
+analysis\LowerCaseTokenizer.java - PORTED
+analysis\MappingCharFilter.java - PORTED
+analysis\NormalizeCharMap.java - PORTED
+analysis\NumericTokenStream.java - PORTED
+analysis\package.html - Text files are different
+analysis\PerFieldAnalyzerWrapper.java - PORTED
+analysis\PorterStemFilter.java - PORTED
+analysis\PorterStemmer.java - PORTED
+analysis\SimpleAnalyzer.java - PORTED
+analysis\SinkTokenizer.java - Removed in 3.x
+analysis\StopAnalyzer.java - PORTED
+analysis\StopFilter.java - PORTED
+analysis\TeeSinkTokenFilter.java - PORTED
+analysis\TeeTokenFilter.java - Removed in 3.x
+analysis\Token.java - PORTED
+analysis\TokenFilter.java - PORTED
+analysis\Tokenizer.java - PORTED
+analysis\TokenStream.java - PORTED
+analysis\TokenWrapper.java - Removed in 3.x
+analysis\WhitespaceAnalyzer.java - PORTED
+analysis\WhitespaceTokenizer.java - PORTED
+analysis\WordlistLoader.java - PORTED
+
+document\
+document\AbstractField.java - PORTED
+document\CompressionTools.java - Text files are different
+document\DateField.java - Text files are different
+document\DateTools.java - Text files are different
+document\Document.java - PORTED
+document\Field.java - PORTED
+document\Fieldable.java - PORTED
+document\FieldSelector.java - IDENTICAL
+document\FieldSelectorResult.java - Text files are different
+document\LoadFirstFieldSelector.java - IDENTICAL
+document\MapFieldSelector.java - Text files are different
+document\NumberTools.java - Text files are different
+document\NumericField.java - PORTED
+document\package.html - IDENTICAL
+document\SetBasedFieldSelector.java - Text files are different
+
+index\
+	index\AbstractAllTermDocs.java - IDENTICAL
+	index\AllTermDocs.java - IDENTICAL
+	index\BufferedDeletes.java - PORTED
+	index\ByteBlockPool.java - PORTED
+	index\ByteSliceReader.java - Text files are different
+	index\ByteSliceWriter.java - IDENTICAL
+	index\CharBlockPool.java - IDENTICAL
+	index\CheckIndex.java - PORTED
+	index\CompoundFileReader.java - PORTED
+	index\CompoundFileWriter.java - Text files are different
+	index\ConcurrentMergeScheduler.java - PORTED
+	index\CorruptIndexException.java - IDENTICAL
+	index\DefaultSkipListReader.java - Text files are different
+	index\DefaultSkipListWriter.java - Text files are different
+	index\DirectoryOwningReader.java - Removed in 3.x
+	index\DirectoryReader.java - Text files are different
+	index\DocConsumer.java - PORTED
+	index\DocConsumerPerThread.java - IDENTICAL
+	index\DocFieldConsumer.java - PORTED
+	index\DocFieldConsumerPerField.java - IDENTICAL
+	index\DocFieldConsumerPerThread.java - IDENTICAL
+	index\DocFieldConsumers.java - PORTED
+	index\DocFieldConsumersPerField.java - Text files are different
+	index\DocFieldConsumersPerThread.java - Text files are different
+	index\DocFieldProcessor.java - PORTED
+	index\DocFieldProcessorPerField.java - IDENTICAL
+	index\DocFieldProcessorPerThread.java - PORTED
+	index\DocInverter.java - PORTED
+	index\DocInverterPerField.java - PORTED
+	index\DocInverterPerThread.java - PORTED
+	index\DocumentsWriter.java - PORTED
+	index\DocumentsWriterThreadState.java - PORTED
+	index\FieldInfo.java - Text files are different
+	index\FieldInfos.java - PORTED
+	index\FieldInvertState.java - IDENTICAL
+	index\FieldReaderException.java - IDENTICAL
+	index\FieldSortedTermVectorMapper.java - Text files are different
+	index\FieldsReader.java - PORTED
+	index\FieldsWriter.java - PORTED
+	index\FilterIndexReader.java - PORTED
+	index\FormatPostingsDocsConsumer.java - IDENTICAL
+	index\FormatPostingsDocsWriter.java - Text files are different
+	index\FormatPostingsFieldsConsumer.java - IDENTICAL
+	index\FormatPostingsFieldsWriter.java - Text files are different
+	index\FormatPostingsPositionsConsumer.java - Text files are different
+	index\FormatPostingsPositionsWriter.java - Text files are different
+	index\FormatPostingsTermsConsumer.java - IDENTICAL
+	index\FormatPostingsTermsWriter.java - Text files are different
+	index\FreqProxFieldMergeState.java - IDENTICAL
+	index\FreqProxTermsWriter.java - Text files are different
+	index\FreqProxTermsWriterPerField.java - Text files are different
+	index\FreqProxTermsWriterPerThread.java - Text files are different
+	index\IndexCommit.java - Text files are different
+	index\IndexCommitPoint.java - Removed in 3.x
+	index\IndexDeletionPolicy.java - Text files are different
+	index\IndexFileDeleter.java - PORTED
+	index\IndexFileNameFilter.java - Text files are different
+	index\IndexFileNames.java - Text files are different
+	index\IndexModifier.java - Removed in 3.x
+	index\IndexReader.java - PORTED
+	index\IndexWriter.java - PORTED
+	index\IntBlockPool.java - IDENTICAL
+	index\InvertedDocConsumer.java - PORTED
+	index\InvertedDocConsumerPerField.java - IDENTICAL
+	index\InvertedDocConsumerPerThread.java - IDENTICAL
+	index\InvertedDocEndConsumer.java - PORTED
+	index\InvertedDocEndConsumerPerField.java - IDENTICAL
+	index\InvertedDocEndConsumerPerThread.java - IDENTICAL
+	index\KeepOnlyLastCommitDeletionPolicy.java - Text files are different
+	index\LogByteSizeMergePolicy.java - PORTED
+	index\LogDocMergePolicy.java - PORTED
+	index\LogMergePolicy.java - PORTED
+	index\MergeDocIDRemapper.java - IDENTICAL
+	index\MergePolicy.java - PORTED
+	index\MergeScheduler.java - IDENTICAL
+	index\MultiLevelSkipListReader.java - Text files are different
+	index\MultiLevelSkipListWriter.java - IDENTICAL
+	index\MultipleTermPositions.java - PORTED
+	index\MultiReader.java - PORTED
+	index\NormsWriter.java - PORTED
+	index\NormsWriterPerField.java - Text files are different
+	index\NormsWriterPerThread.java - Text files are different
+	index\package.html - IDENTICAL
+	index\ParallelReader.java - Text files are different
+	index\Payload.java - Text files are different
+	index\PositionBasedTermVectorMapper.java - Text files are different
+	index\RawPostingList.java - IDENTICAL
+	index\ReadOnlyDirectoryReader.java - PORTED
+	index\ReadOnlySegmentReader.java - Text files are different
+	index\ReusableStringReader.java - Text files are different
+	index\SegmentInfo.java - PORTED
+	index\SegmentInfos.java - PORTED * doesn't inherit from a generic equiv as in java
+	index\SegmentMergeInfo.java - IDENTICAL
+	index\SegmentMergeQueue.java - Text files are different
+	index\SegmentMerger.java - Text files are different
+	index\SegmentReader.java - PORTED
+	index\SegmentTermDocs.java - IDENTICAL
+	index\SegmentTermEnum.java - Text files are different
+	index\SegmentTermPositions.java - Text files are different
+	index\SegmentTermPositionVector.java - IDENTICAL
+	index\SegmentTermVector.java - Text files are different
+	index\SegmentWriteState.java - PORTED
+	index\SerialMergeScheduler.java - Text files are different
+	index\SnapshotDeletionPolicy.java - Text files are different
+	index\SortedTermVectorMapper.java - Text files are different
+	index\StaleReaderException.java - IDENTICAL
+	index\StoredFieldsWriter.java - Text files are different
+	index\StoredFieldsWriterPerThread.java - IDENTICAL
+	index\Term.java - Text files are different
+	index\TermBuffer.java - Text files are different
+	index\TermDocs.java - Text files are different
+	index\TermEnum.java - Text files are different
+	index\TermFreqVector.java - IDENTICAL
+	index\TermInfo.java - IDENTICAL
+	index\TermInfosReader.java - PORTED
+	index\TermInfosWriter.java - IDENTICAL
+	index\TermPositions.java - IDENTICAL
+	index\TermPositionVector.java - IDENTICAL
+	index\TermsHash.java - PORTED
+	index\TermsHashConsumer.java - Text files are different
+	index\TermsHashConsumerPerField.java - IDENTICAL
+	index\TermsHashConsumerPerThread.java - IDENTICAL
+	index\TermsHashPerField.java - Text files are different
+	index\TermsHashPerThread.java - Text files are different
+	index\TermVectorEntry.java - Text files are different
+	index\TermVectorEntryFreqSortedComparator.java - Text files are different
+	index\TermVectorMapper.java - IDENTICAL
+	index\TermVectorOffsetInfo.java - Text files are different
+	index\TermVectorsReader.java - Text files are different
+	index\TermVectorsTermsWriter.java - Text files are different
+	index\TermVectorsTermsWriterPerField.java - Text files are different
+	index\TermVectorsTermsWriterPerThread.java - Text files are different
+	index\TermVectorsWriter.java - IDENTICAL
+
+messages\
+	messages\Message.java - IDENTICAL
+	messages\MessageImpl.java - Text files are different
+	messages\NLS.java - Text files are different
+	messages\NLSException.java - IDENTICAL
+	messages\package.html - IDENTICAL
+
+queryParser\
+	queryParser\CharStream.java - Text files are different
+	queryParser\FastCharStream.java - IDENTICAL
+	queryParser\MultiFieldQueryParser.java - PORTED
+	queryParser\package.html - IDENTICAL
+	queryParser\ParseException.java - Text files are different
+	queryParser\QueryParser.java - PORTED
+	queryParser\QueryParser.jj - Text files are different
+	queryParser\QueryParserConstants.java - IDENTICAL
+	queryParser\QueryParserTokenManager.java - Text files are different
+	queryParser\Token.java - Text files are different
+	queryParser\TokenMgrError.java - Text files are different
+
+search\function\
+	search\function\ByteFieldSource.java - Text files are different
+	search\function\CustomScoreProvider.java - IDENTICAL
+	search\function\CustomScoreQuery.java - PORTED
+	search\function\DocValues.java - IDENTICAL
+	search\function\FieldCacheSource.java - Text files are different
+	search\function\FieldScoreQuery.java - Text files are different
+	search\function\FloatFieldSource.java - Text files are different
+	search\function\IntFieldSource.java - Text files are different
+	search\function\MultiValueSource.java - Removed in 3.x
+	search\function\OrdFieldSource.java - Text files are different
+	search\function\package.html - IDENTICAL
+	search\function\ReverseOrdFieldSource.java - Text files are different
+	search\function\ShortFieldSource.java - Text files are different
+	search\function\ValueSource.java - Text files are different
+	search\function\ValueSourceQuery.java - PORTED
+
+search\payloads\
+	search\payloads\AveragePayloadFunction.java - Text files are different
+	search\payloads\BoostingTermQuery.java - Removed in 3.x
+	search\payloads\MaxPayloadFunction.java - Text files are different
+	search\payloads\MinPayloadFunction.java - Text files are different
+	search\payloads\package.html - IDENTICAL
+	search\payloads\PayloadFunction.java - Text files are different
+	search\payloads\PayloadNearQuery.java - Text files are different
+	search\payloads\PayloadSpanUtil.java - PORTED
+	search\payloads\PayloadTermQuery.java - Text files are different
+
+search\spans\
+	search\spans\FieldMaskingSpanQuery.java - PORTED
+	search\spans\NearSpansOrdered.java - PORTED
+	search\spans\NearSpansUnordered.java - Text files are different
+	search\spans\package.html - IDENTICAL
+	search\spans\SpanFirstQuery.java - PORTED
+	search\spans\SpanNearQuery.java - PORTED
+	search\spans\SpanNotQuery.java - PORTED
+	search\spans\SpanOrQuery.java - PORTED
+	search\spans\SpanQuery.java - PORTED
+	search\spans\Spans.java - Text files are different
+	search\spans\SpanScorer.java - PORTED
+	search\spans\SpanTermQuery.java - PORTED
+	search\spans\SpanWeight.java - Text files are different
+	search\spans\TermSpans.java - Text files are different
+
+search
+	BooleanClause.java - Text files are different
+	BooleanQuery.java - PORTED
+	BooleanScorer.java - Text files are different
+	BooleanScorer2.java - PORTED
+	CachingSpanFilter.java - PORTED
+	CachingWrapperFilter.java - PORTED
+	Collector.java - Text files are different
+	ComplexExplanation.java - Text files are different
+	ConjunctionScorer.java - PORTED
+	ConstantScoreQuery.java - PORTED
+	ConstantScoreRangeQuery.java - Removed in 3.x
+	DefaultSimilarity.java - Text files are different
+	DisjunctionMaxQuery.java - PORTED
+	DisjunctionMaxScorer.java - PORTED
+	DisjunctionSumScorer.java - PORTED
+	DocIdSet.java - Text files are different
+	DocIdSetIterator.java - Text files are different
+	ExactPhraseScorer.java - PORTED
+	Explanation.java - PORTED
+	ExtendedFieldCache.java - Removed in 3.x
+	FieldCache.java - Text files are different
+	FieldCacheImpl.java - Text files are different
+	FieldCacheRangeFilter.java - PORTED
+	FieldCacheTermsFilter.java - PORTED
+	FieldComparator.java - Text files are different
+	FieldComparatorSource.java - IDENTICAL
+	FieldDoc.java - Text files are different
+	FieldDocSortedHitQueue.java - PORTED
+	FieldSortedHitQueue.java - Removed in 3.x
+	FieldValueHitQueue.java - PORTED
+	Filter.java - Text files are different
+	FilteredDocIdSet.java - Text files are different
+	FilteredDocIdSetIterator.java - PORTED
+	FilteredQuery.java - PORTED
+	FilteredTermEnum.java - PORTED
+	FilterManager.java - Text files are different
+	FuzzyQuery.java - Text files are different
+	FuzzyTermEnum.java - Text files are different
+	Hit.java - Removed in 3.x
+	HitCollector.java - Removed in 3.x
+	HitCollectorWrapper.java - Removed in 3.x
+	HitIterator.java - Removed in 3.x
+	HitQueue.java - PORTED
+	Hits.java - Removed in 3.x
+	IndexSearcher.java - PORTED
+	MatchAllDocsQuery.java - PORTED
+	MultiPhraseQuery.java - PORTED
+	MultiSearcher.java - PORTED * Double check MultiSearcherCallableNoSort/Sort and the use of new Object() rather than a dummy lock. Shouldn't make a difference; it seems to be used because ParallelMultiSearcher uses a ReentrantLock instead of synchronized
+	MultiTermQuery.java - Text files are different
+	MultiTermQueryWrapperFilter.java - Text files are different
+	NumericRangeFilter.java - Text files are different
+	NumericRangeQuery.java - Text files are different
+	package.html - Text files are different
+	ParallelMultiSearcher.java - PORTED
+	PhrasePositions.java - IDENTICAL
+	PhraseQuery.java - PORTED
+	PhraseQueue.java - PORTED
+	PhraseScorer.java - PORTED
+	PositiveScoresOnlyCollector.java - Text files are different
+	PrefixFilter.java - Text files are different
+	PrefixQuery.java - Text files are different
+	PrefixTermEnum.java - Text files are different
+	Query.java - PORTED
+	QueryFilter.java - Removed in 3.x
+	QueryTermVector.java - Text files are different
+	QueryWrapperFilter.java - Text files are different
+	RangeFilter.java - Removed in 3.x
+	RangeQuery.java - Removed in 3.x
+	ReqExclScorer.java - PORTED
+	ReqOptSumScorer.java - PORTED
+	ScoreCachingWrappingScorer.java - PORTED
+	ScoreDoc.java - Text files are different
+	ScoreDocComparator.java - Removed in 3.x
+	Scorer.java - Text files are different
+	Searchable.java - Text files are different
+	Searcher.java - Text files are different
+	Similarity.java - PORTED
+	SimilarityDelegator.java - PORTED
+	SingleTermEnum.java - PORTED
+	SloppyPhraseScorer.java - PORTED
+	Sort.java - Text files are different
+	SortComparator.java - Removed in 3.x
+	SortComparatorSource.java - Removed in 3.x
+	SortField.java - Text files are different
+	SpanFilter.java - IDENTICAL
+	SpanFilterResult.java - Text files are different
+	SpanQueryFilter.java - Text files are different
+	TermQuery.java - PORTED
+	TermRangeFilter.java - Text files are different
+	TermRangeQuery.java - Text files are different
+	TermRangeTermEnum.java - Text files are different
+	TermScorer.java - PORTED
+	TimeLimitedCollector.java - Removed in 3.x
+	TimeLimitingCollector.java - Text files are different
+	TopDocCollector.java - Removed in 3.x
+	TopDocs.java - Text files are different
+	TopDocsCollector.java - Text files are different
+	TopFieldCollector.java - PORTED
+	TopFieldDocCollector.java - Removed in 3.x
+	TopFieldDocs.java - IDENTICAL
+	TopScoreDocCollector.java - PORTED
+	Weight.java - IDENTICAL
+	WildcardQuery.java - PORTED
+	WildcardTermEnum.java - Text files are different
+
+store\
+	AlreadyClosedException.java - IDENTICAL
+	BufferedIndexInput.java - PORTED
+	BufferedIndexOutput.java - PORTED
+	ChecksumIndexInput.java - PORTED
+	ChecksumIndexOutput.java - PORTED
+	Directory.java - PORTED
+	FileSwitchDirectory.java - PORTED
+	FSDirectory.java - Text files are different
+	FSLockFactory.java - IDENTICAL
+	IndexInput.java - PORTED * See IDisposable
+	IndexOutput.java - PORTED * See IDisposable
+	Lock.java - PORTED
+	LockFactory.java - IDENTICAL
+	LockObtainFailedException.java - IDENTICAL
+	LockReleaseFailedException.java - IDENTICAL
+	LockStressTest.java - Text files are different
+	LockVerifyServer.java - IDENTICAL
+	MMapDirectory.java - Text files are different - PORT ISSUES
+	NativeFSLockFactory.java - PORTED
+	NIOFSDirectory.java - Text files are different - PORT ISSUES
+	NoLockFactory.java - PORTED
+	NoSuchDirectoryException.java - IDENTICAL
+	package.html - IDENTICAL
+	RAMDirectory.java - PORTED
+	RAMFile.java - PORTED
+	RAMInputStream.java - PORTED
+	RAMOutputStream.java - PORTED
+	SimpleFSDirectory.java - PORTED
+	SimpleFSLockFactory.java - PORTED
+	SingleInstanceLockFactory.java - PORTED
+	VerifyingLockFactory.java - PORTED
+
+util\cache\
+	Cache.java - PORTED
+	SimpleLRUCache.java - PORTED
+	SimpleMapCache.java - PORTED
+
+lucene\util\
+	ArrayUtil.java - IDENTICAL
+	Attribute.java - IDENTICAL
+	AttributeImpl.java - PORTED
+	AttributeSource.java - PORTED
+	AverageGuessMemoryModel.java - PORTED
+	BitUtil.java - IDENTICAL
+	BitVector.java - PORTED
+	CloseableThreadLocal.java - PORTED
+	Constants.java - PORTED * Static constructor for LUCENE_MAIN_VERSION differs greatly from the Java version.
+	DocIdBitSet.java - PORTED
+	DummyConcurrentLock.java - New in 3.x
+	FieldCacheSanityChecker.java - PORTED
+	IndexableBinaryStringTools.java - IDENTICAL
+	MapOfSets.java - PORTED
+	MemoryModel.java - IDENTICAL
+	NamedThreadFactory.java - New in 3.x
+	NumericUtils.java - IDENTICAL
+	OpenBitSet.java - PORTED
+	OpenBitSetDISI.java - IDENTICAL
+	OpenBitSetIterator.java - PORTED
+	package.html - IDENTICAL
+	Parameter.java - PORTED
+	PriorityQueue.java - PORTED
+	RamUsageEstimator.java - PORTED
+	ReaderUtil.java - PORTED
+	ScorerDocQueue.java - IDENTICAL
+	SimpleStringInterner.java - PORTED
+	SmallFloat.java - IDENTICAL
+	SortedVIntList.java - PORTED
+	SorterTemplate.java - IDENTICAL
+	StringHelper.java - PORTED
+	StringInterner.java - IDENTICAL
+	ThreadInterruptedException.java - New in 3.x (NOT NEEDED IN .NET)
+	ToStringUtils.java - IDENTICAL
+	UnicodeUtil.java - IDENTICAL
+	Version.java - PORTED
\ No newline at end of file

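A side note on the "* See IDisposable" markers against IndexInput/IndexOutput in the store list above: they presumably refer to bridging Java's close() onto .NET's IDisposable during the port. A minimal, hedged sketch of that conventional bridge follows; everything here beyond IDisposable itself is an illustrative assumption, not the ported class:

    using System;

    // Illustrative Close()-to-Dispose() bridge of the kind the notes
    // above appear to point at; not the actual Lucene.Net code.
    public abstract class ClosableSketch : IDisposable
    {
        public void Close()                  // Java-style API kept for callers
        {
            Dispose();
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        protected virtual void Dispose(bool disposing)
        {
            // subclasses release buffers / file handles here
        }
    }
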
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/BufferedDeletes.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/BufferedDeletes.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/BufferedDeletes.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/BufferedDeletes.cs Sat Nov 12 10:44:21 2011
@@ -18,6 +18,7 @@
 using System;
 using System.Collections.Generic;
 using Lucene.Net.Search;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
@@ -34,7 +35,7 @@ namespace Lucene.Net.Index
 	{
 		internal int numTerms;
         internal IDictionary<Term,Num> terms = null;
-		internal IDictionary<Query, int> queries = new SupportClass.HashMap<Query, int>();
+		internal IDictionary<Query, int> queries = new HashMap<Query, int>();
 		internal List<int> docIDs = new List<int>();
 		internal long bytesUsed;
         internal  bool doTermSort;
@@ -49,7 +50,7 @@ namespace Lucene.Net.Index
             }
             else
             {
-                terms = new SupportClass.HashMap<Term, Num>();
+                terms = new HashMap<Term, Num>();
             }
         }
                 
@@ -145,7 +146,7 @@ namespace Lucene.Net.Index
                     }
                     else
                     {
-                        newDeleteTerms = new SupportClass.HashMap<Term, Num>();
+                        newDeleteTerms = new HashMap<Term, Num>();
                     }
 					foreach(var entry in terms)
 					{
@@ -171,11 +172,11 @@ namespace Lucene.Net.Index
 					newDeleteDocIDs = null;
 				
 				// Remap delete-by-query
-				SupportClass.HashMap<Query, int> newDeleteQueries;
+				HashMap<Query, int> newDeleteQueries;
 				
 				if (queries.Count > 0)
 				{
-                    newDeleteQueries = new SupportClass.HashMap<Query, int>(queries.Count);
+                    newDeleteQueries = new HashMap<Query, int>(queries.Count);
 					foreach(var entry in queries)
 					{
 						int num = (int)entry.Value;

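The SupportClass.HashMap -> Lucene.Net.Support.HashMap renames in this and the following hunks assume a map with java.util.HashMap lookup semantics: indexing a missing key yields null/default instead of the KeyNotFoundException a plain Dictionary throws. The threadBindings lookup in the DocumentsWriter hunk further down relies on exactly that. A minimal sketch of the assumed behavior (not the shipped Support class):

    using System;
    using System.Collections.Generic;

    // Sketch: a Dictionary whose indexer returns default(TValue) for a
    // missing key, mirroring java.util.HashMap returning null.
    class HashMapSketch<TKey, TValue> : Dictionary<TKey, TValue>
    {
        public new TValue this[TKey key]
        {
            get
            {
                TValue value;
                return TryGetValue(key, out value) ? value : default(TValue);
            }
            set { base[key] = value; }
        }
    }

    static class HashMapDemo
    {
        static void Main()
        {
            var map = new HashMapSketch<string, string>();
            Console.WriteLine(map["missing"] == null);   // True: no exception
        }
    }
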
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteBlockPool.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteBlockPool.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteBlockPool.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteBlockPool.cs Sat Nov 12 10:44:21 2011
@@ -34,6 +34,7 @@
 
 using System;
 using System.Collections.Generic;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
@@ -152,9 +153,9 @@ namespace Lucene.Net.Index
 			buffer[newUpto + 2] = slice[upto - 1];
 			
 			// Write forwarding address at end of last slice:
-			slice[upto - 3] = (byte) (SupportClass.Number.URShift(offset, 24));
-			slice[upto - 2] = (byte) (SupportClass.Number.URShift(offset, 16));
-			slice[upto - 1] = (byte) (SupportClass.Number.URShift(offset, 8));
+			slice[upto - 3] = (byte) (Number.URShift(offset, 24));
+			slice[upto - 2] = (byte) (Number.URShift(offset, 16));
+			slice[upto - 1] = (byte) (Number.URShift(offset, 8));
 			slice[upto] = (byte) offset;
 			
 			// Write new level:

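Number.URShift, used throughout these hunks, stands in for Java's >>> operator, which C# lacks for signed integers. A minimal sketch of the assumed behavior (illustration only, not the Support class):

    using System;

    static class URShiftSketch
    {
        // Zero-fill right shift on a signed int: casting to uint makes the
        // shift fill with zeros instead of sign-extending, as >>> does in Java.
        public static int URShift(int number, int bits)
        {
            return (int)((uint)number >> bits);
        }

        static void Main()
        {
            Console.WriteLine(URShift(-1, 24));   // 255 (top byte of 0xFFFFFFFF)
            Console.WriteLine(-1 >> 24);          // -1: C#'s >> sign-extends
        }
    }
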
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteSliceWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteSliceWriter.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteSliceWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ByteSliceWriter.cs Sat Nov 12 10:44:21 2011
@@ -16,6 +16,7 @@
  */
 
 using System;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
@@ -92,7 +93,7 @@ namespace Lucene.Net.Index
 			while ((i & ~ 0x7F) != 0)
 			{
 				WriteByte((byte) ((i & 0x7f) | 0x80));
-				i = SupportClass.Number.URShift(i, 7);
+				i = Number.URShift(i, 7);
 			}
 			WriteByte((byte) i);
 		}

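The WriteVInt loop in the ByteSliceWriter hunk above emits Lucene's variable-length int format: seven payload bits per byte, low-order bits first, with 0x80 set on every byte except the last. A standalone sketch of the same encoding with a worked example (illustration, not the shipped writer):

    using System;
    using System.Collections.Generic;

    static class VIntSketch
    {
        // Same loop shape as the WriteVInt hunk above.
        public static List<byte> Encode(int i)
        {
            var bytes = new List<byte>();
            while ((i & ~0x7F) != 0)
            {
                bytes.Add((byte)((i & 0x7F) | 0x80));
                i = (int)((uint)i >> 7);          // Number.URShift(i, 7)
            }
            bytes.Add((byte)i);
            return bytes;
        }

        static void Main()
        {
            // 300 (binary 1 0010 1100) encodes as AC 02: low seven bits first
            Console.WriteLine(BitConverter.ToString(Encode(300).ToArray()));
        }
    }
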
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CheckIndex.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CheckIndex.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CheckIndex.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CheckIndex.cs Sat Nov 12 10:44:21 2011
@@ -17,6 +17,7 @@
 
 using System;
 using System.Collections.Generic;
+using Lucene.Net.Support;
 using AbstractField = Lucene.Net.Documents.AbstractField;
 using Document = Lucene.Net.Documents.Document;
 using Directory = Lucene.Net.Store.Directory;
@@ -413,7 +414,7 @@ namespace Lucene.Net.Index
 			System.String userDataString;
 			if (sis.GetUserData().Count > 0)
 			{
-				userDataString = " userData=" + SupportClass.CollectionsHelper.CollectionToString(sis.GetUserData());
+				userDataString = " userData=" + CollectionsHelper.CollectionToString(sis.GetUserData());
 			}
 			else
 			{
@@ -478,7 +479,7 @@ namespace Lucene.Net.Index
 					segInfoStat.diagnostics = diagnostics;
 					if (diagnostics.Count > 0)
 					{
-						Msg("    diagnostics = " + SupportClass.CollectionsHelper.CollectionToString(diagnostics));
+						Msg("    diagnostics = " + CollectionsHelper.CollectionToString(diagnostics));
 					}
 					
 					int docStoreOffset = info.GetDocStoreOffset();
@@ -601,7 +602,7 @@ namespace Lucene.Net.Index
 				}
 				
 				// Keeper
-				result.newSegments.Add(info.Clone());
+				result.newSegments.Add((SegmentInfo)info.Clone());
 			}
 			
 			if (0 == result.numBadSegments)

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CompoundFileReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CompoundFileReader.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CompoundFileReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/CompoundFileReader.cs Sat Nov 12 10:44:21 2011
@@ -17,6 +17,7 @@
 
 using System;
 using System.Linq;
+using Lucene.Net.Support;
 using BufferedIndexInput = Lucene.Net.Store.BufferedIndexInput;
 using Directory = Lucene.Net.Store.Directory;
 using IndexInput = Lucene.Net.Store.IndexInput;
@@ -48,7 +49,7 @@ namespace Lucene.Net.Index
 		private System.String fileName;
 		
 		private IndexInput stream;
-		private SupportClass.HashMap<string, FileEntry> entries = new SupportClass.HashMap<string, FileEntry>();
+		private HashMap<string, FileEntry> entries = new HashMap<string, FileEntry>();
 		
 		
 		public CompoundFileReader(Directory dir, System.String name):this(dir, name, BufferedIndexInput.BUFFER_SIZE)

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ConcurrentMergeScheduler.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ConcurrentMergeScheduler.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ConcurrentMergeScheduler.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/ConcurrentMergeScheduler.cs Sat Nov 12 10:44:21 2011
@@ -17,6 +17,7 @@
 
 using System;
 using System.Collections.Generic;
+using Lucene.Net.Support;
 using Directory = Lucene.Net.Store.Directory;
 
 namespace Lucene.Net.Index
@@ -129,7 +130,7 @@ namespace Lucene.Net.Index
 				{
 					// Default to slightly higher priority than our
 					// calling thread
-					mergeThreadPriority = 1 + (System.Int32) SupportClass.ThreadClass.Current().Priority;
+					mergeThreadPriority = 1 + (System.Int32) ThreadClass.Current().Priority;
 					if (mergeThreadPriority > (int) System.Threading.ThreadPriority.Highest)
 						mergeThreadPriority = (int) System.Threading.ThreadPriority.Highest;
 				}
@@ -303,7 +304,7 @@ namespace Lucene.Net.Index
 			}
 		}
 		
-		public /*protected internal*/ class MergeThread:SupportClass.ThreadClass
+		public /*protected internal*/ class MergeThread:ThreadClass
 		{
 			private void  InitBlock(ConcurrentMergeScheduler enclosingInstance)
 			{

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DefaultSkipListReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DefaultSkipListReader.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DefaultSkipListReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DefaultSkipListReader.cs Sat Nov 12 10:44:21 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using IndexInput = Lucene.Net.Store.IndexInput;
 
 namespace Lucene.Net.Index
@@ -114,7 +114,7 @@ namespace Lucene.Net.Index
 				{
 					payloadLength[level] = skipStream.ReadVInt();
 				}
-				delta = SupportClass.Number.URShift(delta, 1);
+				delta = Number.URShift(delta, 1);
 			}
 			else
 			{

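In the DefaultSkipListReader hunk, the surrounding if/else (partially visible above) evidently tests the low bit of the freshly read delta: when set, a new payload length follows, and Number.URShift(delta, 1) then strips the flag bit to recover the document delta. A hedged sketch of that decode step, assuming this reading of the 2.9/3.x skip-entry layout:

    using System;

    static class SkipEntrySketch
    {
        // Assumed layout when the field stores payloads: bit 0 of the VInt
        // flags "payload length changed"; the remaining bits hold the delta.
        static void Decode(int raw)
        {
            bool payloadLengthFollows = (raw & 1) != 0;
            int docDelta = (int)((uint)raw >> 1);     // Number.URShift(raw, 1)
            Console.WriteLine("delta=" + docDelta + " payloadLenFollows=" + payloadLengthFollows);
        }

        static void Main()
        {
            Decode((5 << 1) | 1);   // delta=5 payloadLenFollows=True
            Decode(5 << 1);         // delta=5 payloadLenFollows=False
        }
    }
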
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DirectoryReader.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DirectoryReader.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DirectoryReader.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DirectoryReader.cs Sat Nov 12 10:44:21 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using Document = Lucene.Net.Documents.Document;
 using FieldSelector = Lucene.Net.Documents.FieldSelector;
 using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
@@ -101,7 +101,7 @@ namespace Lucene.Net.Index
                 
         private SegmentReader[] subReaders;
         private int[] starts; // 1st docno for each segment
-        private System.Collections.Generic.IDictionary<string, byte[]> normsCache = new SupportClass.HashMap<string, byte[]>();
+        private System.Collections.Generic.IDictionary<string, byte[]> normsCache = new HashMap<string, byte[]>();
         private int maxDoc = 0;
         private int numDocs = - 1;
         private bool hasDeletions = false;
@@ -255,7 +255,7 @@ namespace Lucene.Net.Index
             // we put the old SegmentReaders in a map, that allows us
             // to lookup a reader using its segment name
             // TODO: Leave as Object or java's Integer class?
-            System.Collections.Generic.IDictionary<string, object> segmentReaders = new SupportClass.HashMap<string, object>();
+            System.Collections.Generic.IDictionary<string, object> segmentReaders = new HashMap<string, object>();
             
             if (oldReaders != null)
             {
@@ -729,7 +729,7 @@ namespace Lucene.Net.Index
             
             while (hi >= lo)
             {
-                int mid = SupportClass.Number.URShift((lo + hi), 1);
+                int mid = Number.URShift((lo + hi), 1);
                 int midValue = starts[mid];
                 if (n < midValue)
                     hi = mid - 1;

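The binary-search midpoint in the DirectoryReader hunk uses Number.URShift((lo + hi), 1) rather than (lo + hi) / 2, matching Java's (lo + hi) >>> 1 idiom: if lo + hi overflows into a negative int, the zero-fill shift still yields the correct midpoint while division does not. A small demonstration with made-up large values:

    using System;

    static class MidpointSketch
    {
        static void Main()
        {
            int lo = 1500000000, hi = 1600000000;
            int sum = unchecked(lo + hi);              // wraps to a negative int
            Console.WriteLine(sum / 2);                // -597483648: wrong
            Console.WriteLine((int)((uint)sum >> 1));  // 1550000000: correct
        }
    }
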
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocConsumer.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocConsumer.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocConsumer.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocConsumer.cs Sat Nov 12 10:44:21 2011
@@ -23,7 +23,7 @@ namespace Lucene.Net.Index
 	abstract class DocConsumer
 	{
 		public abstract DocConsumerPerThread AddThread(DocumentsWriterThreadState perThread);
-		public abstract void  Flush(System.Collections.ICollection threads, SegmentWriteState state);
+		public abstract void  Flush(System.Collections.Generic.ICollection<DocConsumerPerThread> threads, SegmentWriteState state);
 		public abstract void  CloseDocStore(SegmentWriteState state);
 		public abstract void  Abort();
 		public abstract bool FreeRAM();

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldConsumers.cs Sat Nov 12 10:44:21 2011
@@ -17,6 +17,7 @@
 
 using System;
 using System.Collections.Generic;
+using Lucene.Net.Support;
 using ArrayUtil = Lucene.Net.Util.ArrayUtil;
 
 namespace Lucene.Net.Index
@@ -52,8 +53,8 @@ namespace Lucene.Net.Index
         public override void Flush(IDictionary<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>> threadsAndFields, SegmentWriteState state)
 		{
 
-            var oneThreadsAndFields = new SupportClass.HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
-			var twoThreadsAndFields = new SupportClass.HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
+            var oneThreadsAndFields = new HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
+			var twoThreadsAndFields = new HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
 			
 			foreach(var entry in threadsAndFields)
 			{

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessor.cs Sat Nov 12 10:44:21 2011
@@ -18,6 +18,7 @@
 using System;
 using System.Collections;
 using System.Collections.Generic;
+using Lucene.Net.Support;
 
 namespace Lucene.Net.Index
 {
@@ -29,7 +30,7 @@ namespace Lucene.Net.Index
 	/// DocFieldConsumer.
 	/// </summary>
 	
-	sealed class DocFieldProcessor:DocConsumer
+	sealed class DocFieldProcessor : DocConsumer
 	{
 		
 		internal DocumentsWriter docWriter;
@@ -51,10 +52,9 @@ namespace Lucene.Net.Index
 			fieldsWriter.CloseDocStore(state);
 		}
 		
-		public override void  Flush(System.Collections.ICollection threads, SegmentWriteState state)
+		public override void Flush(ICollection<DocConsumerPerThread> threads, SegmentWriteState state)
 		{
-			
-			var childThreadsAndFields = new SupportClass.HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
+			var childThreadsAndFields = new HashMap<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>>();
 			foreach(DocConsumerPerThread thread in threads)
 			{
                 DocFieldProcessorPerThread perThread = (DocFieldProcessorPerThread)thread;

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocFieldProcessorPerThread.cs Sat Nov 12 10:44:21 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using Document = Lucene.Net.Documents.Document;
 using Fieldable = Lucene.Net.Documents.Fieldable;
 using ArrayUtil = Lucene.Net.Util.ArrayUtil;
@@ -317,7 +317,7 @@ namespace Lucene.Net.Index
 				return ;
 			}
 			
-			int mid = SupportClass.Number.URShift((lo + hi), 1);
+			int mid = Number.URShift((lo + hi), 1);
 			
 			if (String.CompareOrdinal(array[lo].fieldInfo.name, array[mid].fieldInfo.name) > 0)
 			{

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverter.cs Sat Nov 12 10:44:21 2011
@@ -17,6 +17,7 @@
 
 using System;
 using System.Collections.Generic;
+using Lucene.Net.Support;
 using AttributeSource = Lucene.Net.Util.AttributeSource;
 
 namespace Lucene.Net.Index
@@ -49,8 +50,8 @@ namespace Lucene.Net.Index
         public override void Flush(IDictionary<DocFieldConsumerPerThread, ICollection<DocFieldConsumerPerField>> threadsAndFields, SegmentWriteState state)
 		{
 
-            var childThreadsAndFields = new SupportClass.HashMap<InvertedDocConsumerPerThread, ICollection<InvertedDocConsumerPerField>>();
-            var endChildThreadsAndFields = new SupportClass.HashMap<InvertedDocEndConsumerPerThread, ICollection<InvertedDocEndConsumerPerField>>();
+            var childThreadsAndFields = new HashMap<InvertedDocConsumerPerThread, ICollection<InvertedDocConsumerPerField>>();
+            var endChildThreadsAndFields = new HashMap<InvertedDocEndConsumerPerThread, ICollection<InvertedDocEndConsumerPerField>>();
 
             foreach (var entry in threadsAndFields)
 			{

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerField.cs Sat Nov 12 10:44:21 2011
@@ -151,9 +151,9 @@ namespace Lucene.Net.Index
 							bool hasMoreTokens = stream.IncrementToken();
 							
 							fieldState.attributeSource = stream;
-							
-							OffsetAttribute offsetAttribute = fieldState.attributeSource.AddAttribute(typeof(OffsetAttribute));
-							PositionIncrementAttribute posIncrAttribute = fieldState.attributeSource.AddAttribute(typeof(PositionIncrementAttribute));
+
+                            OffsetAttribute offsetAttribute = fieldState.attributeSource.AddAttribute<OffsetAttribute>();
+							PositionIncrementAttribute posIncrAttribute = fieldState.attributeSource.AddAttribute<PositionIncrementAttribute>();
 							
 							consumer.Start(field);
 							

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocInverterPerThread.cs Sat Nov 12 10:44:21 2011
@@ -47,8 +47,8 @@ namespace Lucene.Net.Index
 
             internal SingleTokenAttributeSource()
 			{
-				termAttribute = AddAttribute(typeof(TermAttribute));
-				offsetAttribute = AddAttribute(typeof(OffsetAttribute));
+                termAttribute = AddAttribute<TermAttribute>();
+				offsetAttribute = AddAttribute<OffsetAttribute>();
 			}
 			
 			public void  Reinit(System.String stringValue, int startOffset, int endOffset)

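The DocInverterPerField and DocInverterPerThread hunks above replace the typeof()-based AddAttribute overload with the generic AddAttribute<T>() of the 3.x attribute API, so the call returns the typed attribute directly instead of an object to cast. A hedged consumer-side sketch of the pattern; the member names Term(), StartOffset() and EndOffset() follow the 2.9-era method-style API and are assumptions here, not taken from this commit:

    using System;
    using Lucene.Net.Analysis;
    using Lucene.Net.Analysis.Tokenattributes;

    static class AttributeApiSketch
    {
        // Walk a token stream using generically retrieved attributes, as in
        // the hunks above. Both attributes are reused views onto the
        // stream's current token.
        public static void Dump(TokenStream stream)
        {
            TermAttribute termAtt = stream.AddAttribute<TermAttribute>();
            OffsetAttribute offsetAtt = stream.AddAttribute<OffsetAttribute>();
            while (stream.IncrementToken())
            {
                Console.WriteLine(termAtt.Term() + " [" + offsetAtt.StartOffset()
                                  + "," + offsetAtt.EndOffset() + ")");
            }
        }
    }
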
Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/DocumentsWriter.cs Sat Nov 12 10:44:21 2011
@@ -19,6 +19,7 @@ using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading;
+using Lucene.Net.Support;
 using Analyzer = Lucene.Net.Analysis.Analyzer;
 using Document = Lucene.Net.Documents.Document;
 using AlreadyClosedException = Lucene.Net.Store.AlreadyClosedException;
@@ -166,7 +167,7 @@ namespace Lucene.Net.Index
 		// than this they share ThreadStates
 		private const int MAX_THREAD_STATE = 5;
 		private DocumentsWriterThreadState[] threadStates = new DocumentsWriterThreadState[0];
-        private SupportClass.HashMap<SupportClass.ThreadClass, DocumentsWriterThreadState> threadBindings = new SupportClass.HashMap<SupportClass.ThreadClass, DocumentsWriterThreadState>();
+        private HashMap<ThreadClass, DocumentsWriterThreadState> threadBindings = new HashMap<ThreadClass, DocumentsWriterThreadState>();
 		
 		private int pauseThreads; // Non-zero when we need all threads to
 		// pause (eg to flush)
@@ -912,7 +913,7 @@ namespace Lucene.Net.Index
 				// First, find a thread state.  If this thread already
 				// has affinity to a specific ThreadState, use that one
 				// again.
-				DocumentsWriterThreadState state = threadBindings[SupportClass.ThreadClass.Current()];
+				DocumentsWriterThreadState state = threadBindings[ThreadClass.Current()];
 				if (state == null)
 				{
 					
@@ -939,7 +940,7 @@ namespace Lucene.Net.Index
 						state = newArray[threadStates.Length] = new DocumentsWriterThreadState(this);
 						threadStates = newArray;
 					}
-					threadBindings[SupportClass.ThreadClass.Current()] = state;
+					threadBindings[ThreadClass.Current()] = state;
 				}
 				
 				// Next, wait until my thread state is idle (in case
@@ -1365,7 +1366,7 @@ namespace Lucene.Net.Index
 				
 				// Delete by query
 				IndexSearcher searcher = new IndexSearcher(reader);
-				foreach(KeyValuePair<Query, int?> entry in deletesFlushed.queries)
+				foreach(KeyValuePair<Query, int> entry in deletesFlushed.queries)
 				{
 					Query query = (Query) entry.Key;
 					int limit = (int)entry.Value;

Modified: incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs
URL: http://svn.apache.org/viewvc/incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs?rev=1201243&r1=1201242&r2=1201243&view=diff
==============================================================================
--- incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs (original)
+++ incubator/lucene.net/branches/Lucene.Net.3.0.3/trunk/src/core/Index/FieldInfos.cs Sat Nov 12 10:44:21 2011
@@ -16,7 +16,7 @@
  */
 
 using System;
-
+using Lucene.Net.Support;
 using Document = Lucene.Net.Documents.Document;
 using Fieldable = Lucene.Net.Documents.Fieldable;
 using Directory = Lucene.Net.Store.Directory;
@@ -53,7 +53,7 @@ namespace Lucene.Net.Index
 		internal const byte OMIT_TERM_FREQ_AND_POSITIONS = (0x40);
 
         private System.Collections.Generic.List<FieldInfo> byNumber = new System.Collections.Generic.List<FieldInfo>();
-        private SupportClass.HashMap<string, FieldInfo> byName = new SupportClass.HashMap<string, FieldInfo>();
+        private HashMap<string, FieldInfo> byName = new HashMap<string, FieldInfo>();
 		private int format;
 		
 		public /*internal*/ FieldInfos()


