lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [17/50] [abbrv] lucenenet git commit: Lucene.Net.Core.Analysis.Analyzer refactor: de-nested TokenStreamComponents and ReuseStrategy classes so they don't need to be qualified when using Analyzer.NewAnonymous()
Date Sun, 05 Mar 2017 11:48:56 GMT
Lucene.Net.Core.Analysis.Analyzer refactor: de-nested TokenStreamComponents and ReuseStrategy
classes so they don't need to be qualified when using Analyzer.NewAnonymous()


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/994e6cfb
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/994e6cfb
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/994e6cfb

Branch: refs/heads/api-work
Commit: 994e6cfbde2364108d2ee12245f4a5c24871b951
Parents: 07ab669
Author: Shad Storhaug <shad@shadstorhaug.com>
Authored: Fri Mar 3 13:02:44 2017 +0700
Committer: Shad Storhaug <shad@shadstorhaug.com>
Committed: Sun Mar 5 17:08:35 2017 +0700

----------------------------------------------------------------------
 .../Analysis/Synonym/FSTSynonymFilterFactory.cs |   2 +-
 src/Lucene.Net.Core/Analysis/Analyzer.cs        | 283 ++++++++++---------
 .../Analyzing/AnalyzingInfixSuggester.cs        |   2 +-
 .../Path/TestReversePathHierarchyTokenizer.cs   |   4 +-
 .../Analysis/Pattern/TestPatternTokenizer.cs    |   4 +-
 .../Pt/TestPortugueseLightStemFilter.cs         |   2 +-
 .../Pt/TestPortugueseMinimalStemFilter.cs       |   4 +-
 .../Analysis/Snowball/TestSnowball.cs           |   4 +-
 .../Suggest/Analyzing/AnalyzingSuggesterTest.cs |  14 +-
 .../Suggest/Analyzing/FuzzySuggesterTest.cs     |   6 +-
 .../Codecs/Lucene41/TestBlockPostingsFormat3.cs |   3 +-
 .../Index/TestDocInverterPerFieldErrorInfo.cs   |   1 +
 .../Index/TestIndexWriterDelete.cs              |   1 +
 .../Index/TestIndexWriterExceptions.cs          |   7 +-
 src/Lucene.Net.Tests/Index/TestTermdocPerf.cs   |   1 +
 .../Search/FuzzyTermOnShortTermsTest.cs         |   1 +
 .../Search/Spans/TestPayloadSpans.cs            |   1 +
 src/Lucene.Net.Tests/Util/TestQueryBuilder.cs   |   3 +-
 18 files changed, 177 insertions(+), 166 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
index 167e17c..d24bbe3 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Synonym/FSTSynonymFilterFactory.cs
@@ -115,7 +115,7 @@ namespace Lucene.Net.Analysis.Synonym
                 Tokenizer tokenizer = factory == null ? new WhitespaceTokenizer(LuceneVersion.LUCENE_CURRENT,
reader) : factory.Create(reader);
                 TokenStream stream = outerInstance.ignoreCase ? (TokenStream)new LowerCaseFilter(LuceneVersion.LUCENE_CURRENT,
tokenizer) : tokenizer;
 #pragma warning restore 612, 618
-                return new Analyzer.TokenStreamComponents(tokenizer, stream);
+                return new TokenStreamComponents(tokenizer, stream);
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Core/Analysis/Analyzer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Analysis/Analyzer.cs b/src/Lucene.Net.Core/Analysis/Analyzer.cs
index 51f0ab1..97c6ef9 100644
--- a/src/Lucene.Net.Core/Analysis/Analyzer.cs
+++ b/src/Lucene.Net.Core/Analysis/Analyzer.cs
@@ -307,146 +307,8 @@ namespace Lucene.Net.Analysis
             }
         }
 
-        /// <summary>
-        /// This class encapsulates the outer components of a token stream. It provides
-        /// access to the source (<see cref="Analysis.Tokenizer"/>) and the outer end
(sink), an
-        /// instance of <see cref="TokenFilter"/> which also serves as the
-        /// <see cref="Analysis.TokenStream"/> returned by
-        /// <seealso cref="Analyzer.GetTokenStream(string, TextReader)"/>.
-        /// </summary>
-        public class TokenStreamComponents
-        {
-            /// <summary>
-            /// Original source of the tokens.
-            /// </summary>
-            protected readonly Tokenizer m_source;
-
-            /// <summary>
-            /// Sink tokenstream, such as the outer tokenfilter decorating
-            /// the chain. This can be the source if there are no filters.
-            /// </summary>
-            protected readonly TokenStream m_sink;
-
-            /// <summary>
-            /// Internal cache only used by <see cref="Analyzer.GetTokenStream(string,
string)"/>. </summary>
-            internal ReusableStringReader reusableStringReader;
-
-            /// <summary>
-            /// Creates a new <see cref="TokenStreamComponents"/> instance.
-            /// </summary>
-            /// <param name="source">
-            ///          the analyzer's tokenizer </param>
-            /// <param name="result">
-            ///          the analyzer's resulting token stream </param>
-            public TokenStreamComponents(Tokenizer source, TokenStream result)
-            {
-                this.m_source = source;
-                this.m_sink = result;
-            }
-
-            /// <summary>
-            /// Creates a new <see cref="TokenStreamComponents"/> instance.
-            /// </summary>
-            /// <param name="source">
-            ///          the analyzer's tokenizer </param>
-            public TokenStreamComponents(Tokenizer source)
-            {
-                this.m_source = source;
-                this.m_sink = source;
-            }
-
-            /// <summary>
-            /// Resets the encapsulated components with the given reader. If the components
-            /// cannot be reset, an Exception should be thrown.
-            /// </summary>
-            /// <param name="reader">
-            ///          a reader to reset the source component </param>
-            /// <exception cref="IOException">
-            ///           if the component's reset method throws an <seealso cref="IOException"/>
</exception>
-            protected internal virtual void SetReader(TextReader reader)
-            {
-                m_source.SetReader(reader);
-            }
-
-            /// <summary>
-            /// Returns the sink <see cref="Analysis.TokenStream"/>
-            /// </summary>
-            /// <returns> the sink <see cref="Analysis.TokenStream"/> </returns>
-            public virtual TokenStream TokenStream
-            {
-                get
-                {
-                    return m_sink;
-                }
-            }
-
-            /// <summary>
-            /// Returns the component's <see cref="Analysis.Tokenizer"/>
-            /// </summary>
-            /// <returns> Component's <see cref="Analysis.Tokenizer"/> </returns>
-            public virtual Tokenizer Tokenizer
-            {
-                get
-                {
-                    return m_source;
-                }
-            }
-        }
-
-        /// <summary>
-        /// Strategy defining how <see cref="TokenStreamComponents"/> are reused per
call to
-        /// <see cref="Analyzer.GetTokenStream(string, TextReader)"/>.
-        /// </summary>
-        public abstract class ReuseStrategy
-        {
-            /// <summary>
-            /// Gets the reusable <see cref="TokenStreamComponents"/> for the field
with the given name.
-            /// </summary>
-            /// <param name="analyzer"> <see cref="Analyzer"/> from which to
get the reused components. Use
-            ///        <see cref="GetStoredValue(Analyzer)"/> and <see cref="SetStoredValue(Analyzer,
object)"/>
-            ///        to access the data on the <see cref="Analyzer"/>. </param>
-            /// <param name="fieldName"> Name of the field whose reusable <see cref="TokenStreamComponents"/>
-            ///        are to be retrieved </param>
-            /// <returns> Reusable <see cref="TokenStreamComponents"/> for the
field, or <c>null</c>
-            ///         if there was no previous components for the field </returns>
-            public abstract TokenStreamComponents GetReusableComponents(Analyzer analyzer,
string fieldName);
-
-            /// <summary>
-            /// Stores the given <see cref="TokenStreamComponents"/> as the reusable
components for the
-            /// field with the give name.
-            /// </summary>
-            /// <param name="fieldName"> Name of the field whose <see cref="TokenStreamComponents"/>
are being set </param>
-            /// <param name="components"> <see cref="TokenStreamComponents"/>
which are to be reused for the field </param>
-            public abstract void SetReusableComponents(Analyzer analyzer, string fieldName,
TokenStreamComponents components);
-
-            /// <summary>
-            /// Returns the currently stored value.
-            /// </summary>
-            /// <returns> Currently stored value or <c>null</c> if no value
is stored </returns>
-            /// <exception cref="AlreadyClosedException"> if the <see cref="Analyzer"/>
is closed. </exception>
-            protected internal object GetStoredValue(Analyzer analyzer)
-            {
-                if (analyzer.storedValue == null)
-                {
-                    throw new AlreadyClosedException("this Analyzer is closed");
-                }
-                return analyzer.storedValue.Get();
-            }
-
-            /// <summary>
-            /// Sets the stored value.
-            /// </summary>
-            /// <param name="storedValue"> Value to store </param>
-            /// <exception cref="AlreadyClosedException"> if the <see cref="Analyzer"/>
is closed. </exception>
-            protected internal void SetStoredValue(Analyzer analyzer, object storedValue)
-            {
-                if (analyzer.storedValue == null)
-                {
-                    throw new AlreadyClosedException("this Analyzer is closed");
-                }
-                analyzer.storedValue.Set(storedValue);
-            }
-        }
+        // LUCENENET specific - de-nested TokenStreamComponents and ReuseStrategy
+        // so they don't need to be qualified when used outside of Analyzer subclasses.
 
         /// <summary>
         /// A predefined <see cref="ReuseStrategy"/>  that reuses the same components
for
@@ -553,4 +415,145 @@ namespace Lucene.Net.Analysis
             }
         }
     }
+
+    /// <summary>
+    /// This class encapsulates the outer components of a token stream. It provides
+    /// access to the source (<see cref="Analysis.Tokenizer"/>) and the outer end (sink),
an
+    /// instance of <see cref="TokenFilter"/> which also serves as the
+    /// <see cref="Analysis.TokenStream"/> returned by
+    /// <seealso cref="Analyzer.GetTokenStream(string, TextReader)"/>.
+    /// </summary>
+    public class TokenStreamComponents
+    {
+        /// <summary>
+        /// Original source of the tokens.
+        /// </summary>
+        protected readonly Tokenizer m_source;
+
+        /// <summary>
+        /// Sink tokenstream, such as the outer tokenfilter decorating
+        /// the chain. This can be the source if there are no filters.
+        /// </summary>
+        protected readonly TokenStream m_sink;
+
+        /// <summary>
+        /// Internal cache only used by <see cref="Analyzer.GetTokenStream(string, string)"/>.
</summary>
+        internal ReusableStringReader reusableStringReader;
+
+        /// <summary>
+        /// Creates a new <see cref="TokenStreamComponents"/> instance.
+        /// </summary>
+        /// <param name="source">
+        ///          the analyzer's tokenizer </param>
+        /// <param name="result">
+        ///          the analyzer's resulting token stream </param>
+        public TokenStreamComponents(Tokenizer source, TokenStream result)
+        {
+            this.m_source = source;
+            this.m_sink = result;
+        }
+
+        /// <summary>
+        /// Creates a new <see cref="TokenStreamComponents"/> instance.
+        /// </summary>
+        /// <param name="source">
+        ///          the analyzer's tokenizer </param>
+        public TokenStreamComponents(Tokenizer source)
+        {
+            this.m_source = source;
+            this.m_sink = source;
+        }
+
+        /// <summary>
+        /// Resets the encapsulated components with the given reader. If the components
+        /// cannot be reset, an Exception should be thrown.
+        /// </summary>
+        /// <param name="reader">
+        ///          a reader to reset the source component </param>
+        /// <exception cref="IOException">
+        ///           if the component's reset method throws an <seealso cref="IOException"/>
</exception>
+        protected internal virtual void SetReader(TextReader reader)
+        {
+            m_source.SetReader(reader);
+        }
+
+        /// <summary>
+        /// Returns the sink <see cref="Analysis.TokenStream"/>
+        /// </summary>
+        /// <returns> the sink <see cref="Analysis.TokenStream"/> </returns>
+        public virtual TokenStream TokenStream
+        {
+            get
+            {
+                return m_sink;
+            }
+        }
+
+        /// <summary>
+        /// Returns the component's <see cref="Analysis.Tokenizer"/>
+        /// </summary>
+        /// <returns> Component's <see cref="Analysis.Tokenizer"/> </returns>
+        public virtual Tokenizer Tokenizer
+        {
+            get
+            {
+                return m_source;
+            }
+        }
+    }
+
+    /// <summary>
+    /// Strategy defining how <see cref="TokenStreamComponents"/> are reused per call
to
+    /// <see cref="Analyzer.GetTokenStream(string, TextReader)"/>.
+    /// </summary>
+    public abstract class ReuseStrategy
+    {
+        /// <summary>
+        /// Gets the reusable <see cref="TokenStreamComponents"/> for the field with
the given name.
+        /// </summary>
+        /// <param name="analyzer"> <see cref="Analyzer"/> from which to get
the reused components. Use
+        ///        <see cref="GetStoredValue(Analyzer)"/> and <see cref="SetStoredValue(Analyzer,
object)"/>
+        ///        to access the data on the <see cref="Analyzer"/>. </param>
+        /// <param name="fieldName"> Name of the field whose reusable <see cref="TokenStreamComponents"/>
+        ///        are to be retrieved </param>
+        /// <returns> Reusable <see cref="TokenStreamComponents"/> for the field,
or <c>null</c>
+        ///         if there were no previous components for the field </returns>
+        public abstract TokenStreamComponents GetReusableComponents(Analyzer analyzer, string
fieldName);
+
+        /// <summary>
+        /// Stores the given <see cref="TokenStreamComponents"/> as the reusable components
for the
+        /// field with the given name.
+        /// </summary>
+        /// <param name="fieldName"> Name of the field whose <see cref="TokenStreamComponents"/>
are being set </param>
+        /// <param name="components"> <see cref="TokenStreamComponents"/> which
are to be reused for the field </param>
+        public abstract void SetReusableComponents(Analyzer analyzer, string fieldName, TokenStreamComponents
components);
+
+        /// <summary>
+        /// Returns the currently stored value.
+        /// </summary>
+        /// <returns> Currently stored value or <c>null</c> if no value
is stored </returns>
+        /// <exception cref="AlreadyClosedException"> if the <see cref="Analyzer"/>
is closed. </exception>
+        protected internal object GetStoredValue(Analyzer analyzer)
+        {
+            if (analyzer.storedValue == null)
+            {
+                throw new AlreadyClosedException("this Analyzer is closed");
+            }
+            return analyzer.storedValue.Get();
+        }
+
+        /// <summary>
+        /// Sets the stored value.
+        /// </summary>
+        /// <param name="storedValue"> Value to store </param>
+        /// <exception cref="AlreadyClosedException"> if the <see cref="Analyzer"/>
is closed. </exception>
+        protected internal void SetStoredValue(Analyzer analyzer, object storedValue)
+        {
+            if (analyzer.storedValue == null)
+            {
+                throw new AlreadyClosedException("this Analyzer is closed");
+            }
+            analyzer.storedValue.Set(storedValue);
+        }
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
index 0a05e5f..28d7220 100644
--- a/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
+++ b/src/Lucene.Net.Suggest/Suggest/Analyzing/AnalyzingInfixSuggester.cs
@@ -256,7 +256,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         {
             private readonly AnalyzingInfixSuggester outerInstance;
 
-            public AnalyzerWrapperAnonymousInnerClassHelper(AnalyzingInfixSuggester outerInstance,
Analyzer.ReuseStrategy reuseStrategy)
+            public AnalyzerWrapperAnonymousInnerClassHelper(AnalyzingInfixSuggester outerInstance,
ReuseStrategy reuseStrategy)
                 : base(reuseStrategy)
             {
                 this.outerInstance = outerInstance;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests.Analysis.Common/Analysis/Path/TestReversePathHierarchyTokenizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Path/TestReversePathHierarchyTokenizer.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Path/TestReversePathHierarchyTokenizer.cs
index b1a35c1..9c3ae0a 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Path/TestReversePathHierarchyTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Path/TestReversePathHierarchyTokenizer.cs
@@ -140,7 +140,7 @@ namespace Lucene.Net.Analysis.Path
             protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
             {
                 Tokenizer tokenizer = new ReversePathHierarchyTokenizer(reader);
-                return new Analyzer.TokenStreamComponents(tokenizer, tokenizer);
+                return new TokenStreamComponents(tokenizer, tokenizer);
             }
         }
 
@@ -166,7 +166,7 @@ namespace Lucene.Net.Analysis.Path
             protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
             {
                 Tokenizer tokenizer = new ReversePathHierarchyTokenizer(reader);
-                return new Analyzer.TokenStreamComponents(tokenizer, tokenizer);
+                return new TokenStreamComponents(tokenizer, tokenizer);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
index 05cf0af..98a4062 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pattern/TestPatternTokenizer.cs
@@ -138,7 +138,7 @@ namespace Lucene.Net.Analysis.Pattern
             protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
             {
                 Tokenizer tokenizer = new PatternTokenizer(reader, new Regex("a", RegexOptions.Compiled),
-1);
-                return new Analyzer.TokenStreamComponents(tokenizer);
+                return new TokenStreamComponents(tokenizer);
             }
         }
 
@@ -154,7 +154,7 @@ namespace Lucene.Net.Analysis.Pattern
             protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
             {
                 Tokenizer tokenizer = new PatternTokenizer(reader, new Regex("a", RegexOptions.Compiled),
0);
-                return new Analyzer.TokenStreamComponents(tokenizer);
+                return new TokenStreamComponents(tokenizer);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseLightStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseLightStemFilter.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseLightStemFilter.cs
index e8df79e..77641f9 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseLightStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseLightStemFilter.cs
@@ -115,7 +115,7 @@ namespace Lucene.Net.Analysis.Pt
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
-                return new Analyzer.TokenStreamComponents(source, new PortugueseLightStemFilter(sink));
+                return new TokenStreamComponents(source, new PortugueseLightStemFilter(sink));
             }
         }
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseMinimalStemFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseMinimalStemFilter.cs
b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseMinimalStemFilter.cs
index 5587049..65bdc33 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseMinimalStemFilter.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Pt/TestPortugueseMinimalStemFilter.cs
@@ -87,7 +87,7 @@ namespace Lucene.Net.Analysis.Pt
             {
                 Tokenizer source = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
                 TokenStream sink = new SetKeywordMarkerFilter(source, exclusionSet);
-                return new Analyzer.TokenStreamComponents(source, new PortugueseMinimalStemFilter(sink));
+                return new TokenStreamComponents(source, new PortugueseMinimalStemFilter(sink));
             }
         }
 
@@ -118,7 +118,7 @@ namespace Lucene.Net.Analysis.Pt
             protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
-                return new Analyzer.TokenStreamComponents(tokenizer, new PortugueseMinimalStemFilter(tokenizer));
+                return new TokenStreamComponents(tokenizer, new PortugueseMinimalStemFilter(tokenizer));
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests.Analysis.Common/Analysis/Snowball/TestSnowball.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Snowball/TestSnowball.cs b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Snowball/TestSnowball.cs
index 5f9157a..ccbc3d0 100644
--- a/src/Lucene.Net.Tests.Analysis.Common/Analysis/Snowball/TestSnowball.cs
+++ b/src/Lucene.Net.Tests.Analysis.Common/Analysis/Snowball/TestSnowball.cs
@@ -179,7 +179,7 @@ namespace Lucene.Net.Analysis.Snowball
             protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
             {
                 Tokenizer tokenizer = new KeywordTokenizer(reader);
-                return new Analyzer.TokenStreamComponents(tokenizer, new SnowballFilter(tokenizer,
lang));
+                return new TokenStreamComponents(tokenizer, new SnowballFilter(tokenizer,
lang));
             }
         }
 
@@ -213,7 +213,7 @@ namespace Lucene.Net.Analysis.Snowball
             protected internal override TokenStreamComponents CreateComponents(string fieldName,
TextReader reader)
             {
                 Tokenizer t = new MockTokenizer(reader);
-                return new Analyzer.TokenStreamComponents(t, new SnowballFilter(t, snowballLanguage));
+                return new TokenStreamComponents(t, new SnowballFilter(t, snowballLanguage));
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
index 1b53a0e..8e87e2e 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/AnalyzingSuggesterTest.cs
@@ -266,7 +266,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             assertEquals("abcd", r.ElementAt(0).Key.toString());
         }
 
-        internal class TestGraphDupsTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class TestGraphDupsTokenStreamComponents : TokenStreamComponents
         {
             private readonly AnalyzingSuggesterTest outerInstance;
             internal int tokenStreamCounter = 0;
@@ -356,7 +356,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             assertEquals(10, results.ElementAt(1).Value);
         }
 
-        internal class TestInputPathRequiredTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class TestInputPathRequiredTokenStreamComponents : TokenStreamComponents
         {
             private readonly AnalyzingSuggesterTest outerInstance;
             internal int tokenStreamCounter = 0;
@@ -469,7 +469,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         } 
         */
 
-        internal class UsualTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class UsualTokenStreamComponents : TokenStreamComponents
         {
             private readonly AnalyzingSuggesterTest outerInstance;
             internal int count;
@@ -1171,7 +1171,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             assertEquals(3, results.ElementAt(2).Value);
         }
 
-        internal class TestDupSurfaceFormsMissingResultsTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class TestDupSurfaceFormsMissingResultsTokenStreamComponents : TokenStreamComponents
         {
             private readonly AnalyzingSuggesterTest outerInstance;
             public TestDupSurfaceFormsMissingResultsTokenStreamComponents(AnalyzingSuggesterTest
outerInstance, Tokenizer tokenizer)
@@ -1250,7 +1250,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             assertEquals("nellie", results.ElementAt(1).Key);
             assertEquals(5, results.ElementAt(1).Value);
         }
-        internal class TestDupSurfaceFormsMissingResults2TokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class TestDupSurfaceFormsMissingResults2TokenStreamComponents : TokenStreamComponents
         {
             internal int count;
             public TestDupSurfaceFormsMissingResults2TokenStreamComponents(Tokenizer tokenizer)
@@ -1335,7 +1335,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             assertEquals(5, results.ElementAt(1).Value);
         }
 
-        internal class Test0ByteKeysTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class Test0ByteKeysTokenStreamComponents : TokenStreamComponents
         {
             internal int tokenStreamCounter = 0;
             internal TokenStream[] tokenStreams = new TokenStream[] {
@@ -1422,7 +1422,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             assertEquals("[i love lucy/7]", suggester.DoLookup("i ", false, 3).toString());
         }
 
-        internal class TestTooManyExpressionsTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class TestTooManyExpressionsTokenStreamComponents : TokenStreamComponents
         {
             public TestTooManyExpressionsTokenStreamComponents(Tokenizer tokenizer)
                 : base(tokenizer)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
index 991233f..3809d1f 100644
--- a/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
+++ b/src/Lucene.Net.Tests.Suggest/Suggest/Analyzing/FuzzySuggesterTest.cs
@@ -216,7 +216,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             // appear first:
             assertEquals("abcd", r.ElementAt(0).Key.toString());
         }
-        internal class TestGraphDupsTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class TestGraphDupsTokenStreamComponents : TokenStreamComponents
         {
             private readonly FuzzySuggesterTest outerInstance;
             internal int tokenStreamCounter = 0;
@@ -314,7 +314,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
             assertTrue(!result.Any());
         }
 
-        internal class TestInputPathRequiredTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class TestInputPathRequiredTokenStreamComponents : TokenStreamComponents
         {
             private readonly FuzzySuggesterTest outerInstance;
             internal int tokenStreamCounter = 0;
@@ -422,7 +422,7 @@ namespace Lucene.Net.Search.Suggest.Analyzing
         } 
         */
 
-        internal class UsualTokenStreamComponents : Analyzer.TokenStreamComponents
+        internal class UsualTokenStreamComponents : TokenStreamComponents
         {
             private readonly FuzzySuggesterTest outerInstance;
             internal int count;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
index 3e6b0e4..5a0b6bd 100644
--- a/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
+++ b/src/Lucene.Net.Tests/Codecs/Lucene41/TestBlockPostingsFormat3.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Analysis;
 using Lucene.Net.Attributes;
 using Lucene.Net.Documents;
 using Lucene.Net.Index;
@@ -149,7 +150,7 @@ namespace Lucene.Net.Codecs.Lucene41
         {
             private readonly TestBlockPostingsFormat3 OuterInstance;
 
-            public AnalyzerAnonymousInnerClassHelper(TestBlockPostingsFormat3 outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+            public AnalyzerAnonymousInnerClassHelper(TestBlockPostingsFormat3 outerInstance, ReuseStrategy PER_FIELD_REUSE_STRATEGY)
                 : base(PER_FIELD_REUSE_STRATEGY)
             {
                 this.OuterInstance = outerInstance;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
index 0e498c8..40d79a3 100644
--- a/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
+++ b/src/Lucene.Net.Tests/Index/TestDocInverterPerFieldErrorInfo.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 using Lucene.Net.Support;
 using System;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
index 6ac040a..3ad1330 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs
@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.Text;
 using System.Threading;
+using Lucene.Net.Analysis;
 using Lucene.Net.Attributes;
 using Lucene.Net.Documents;
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
index 5d45b7d..7408cfe 100644
--- a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
+++ b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Analysis;
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
@@ -537,7 +538,7 @@ namespace Lucene.Net.Index
         {
             private readonly TestIndexWriterExceptions OuterInstance;
 
-            public TEJBFAnalyzerAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+            public TEJBFAnalyzerAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, ReuseStrategy PER_FIELD_REUSE_STRATEGY)
                 : base(PER_FIELD_REUSE_STRATEGY)
             {
                 this.OuterInstance = outerInstance;
@@ -878,7 +879,7 @@ namespace Lucene.Net.Index
         {
             private readonly TestIndexWriterExceptions OuterInstance;
 
-            public TDWEAnalyzerAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+            public TDWEAnalyzerAnonymousInnerClassHelper(TestIndexWriterExceptions outerInstance, ReuseStrategy PER_FIELD_REUSE_STRATEGY)
                 : base(PER_FIELD_REUSE_STRATEGY)
             {
                 this.OuterInstance = outerInstance;
@@ -977,7 +978,7 @@ namespace Lucene.Net.Index
         {
             private readonly TestIndexWriterExceptions OuterInstance;
 
-            public AnalyzerAnonymousInnerClassHelper2(TestIndexWriterExceptions outerInstance, Analyzer.ReuseStrategy PER_FIELD_REUSE_STRATEGY)
+            public AnalyzerAnonymousInnerClassHelper2(TestIndexWriterExceptions outerInstance, ReuseStrategy PER_FIELD_REUSE_STRATEGY)
                 : base(PER_FIELD_REUSE_STRATEGY)
             {
                 this.OuterInstance = outerInstance;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
index c76b4ee..aa76324 100644
--- a/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
+++ b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using Lucene.Net.Attributes;
 using Lucene.Net.Documents;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs b/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs
index 5638f8a..ee59021 100644
--- a/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs
+++ b/src/Lucene.Net.Tests/Search/FuzzyTermOnShortTermsTest.cs
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+using Lucene.Net.Analysis;
 using Lucene.Net.Documents;
 
 namespace Lucene.Net.Search

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs b/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs
index a9393b4..d50a062 100644
--- a/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs
+++ b/src/Lucene.Net.Tests/Search/Spans/TestPayloadSpans.cs
@@ -1,3 +1,4 @@
+using Lucene.Net.Analysis;
 using Lucene.Net.Analysis.TokenAttributes;
 using System;
 using System.Collections.Generic;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/994e6cfb/src/Lucene.Net.Tests/Util/TestQueryBuilder.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/Util/TestQueryBuilder.cs b/src/Lucene.Net.Tests/Util/TestQueryBuilder.cs
index 0945ca3..e61526b 100644
--- a/src/Lucene.Net.Tests/Util/TestQueryBuilder.cs
+++ b/src/Lucene.Net.Tests/Util/TestQueryBuilder.cs
@@ -1,4 +1,5 @@
-using Lucene.Net.Analysis.TokenAttributes;
+using Lucene.Net.Analysis;
+using Lucene.Net.Analysis.TokenAttributes;
 using NUnit.Framework;
 using System.IO;
 


Mime
View raw message