lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nightowl...@apache.org
Subject [02/39] lucenenet git commit: Lucene.Net.Analysis.Sinks refactor: member accessibility and documentation comments
Date Sat, 04 Feb 2017 20:32:21 GMT
Lucene.Net.Analysis.Sinks refactor: member accessibility and documentation comments


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/f3a14dbc
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/f3a14dbc
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/f3a14dbc

Branch: refs/heads/api-work
Commit: f3a14dbccf1c9ee410be7ed2b11ae3abdc6d9435
Parents: 6ea63fd
Author: Shad Storhaug <shad@shadstorhaug.com>
Authored: Sat Feb 4 09:03:55 2017 +0700
Committer: Shad Storhaug <shad@shadstorhaug.com>
Committed: Sat Feb 4 09:03:55 2017 +0700

----------------------------------------------------------------------
 .../Analysis/Sinks/DateRecognizerSinkFilter.cs  | 13 ++--
 .../Analysis/Sinks/TeeSinkTokenFilter.cs        | 68 ++++++++++----------
 .../Analysis/Sinks/TokenRangeSinkFilter.cs      |  2 -
 3 files changed, 40 insertions(+), 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3a14dbc/src/Lucene.Net.Analysis.Common/Analysis/Sinks/DateRecognizerSinkFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/DateRecognizerSinkFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/DateRecognizerSinkFilter.cs
index bb31ae6..ee634e2 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/DateRecognizerSinkFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/DateRecognizerSinkFilter.cs
@@ -32,17 +32,17 @@ namespace Lucene.Net.Analysis.Sinks
     /// </summary>
     public class DateRecognizerSinkFilter : TeeSinkTokenFilter.SinkFilter
     {
-        protected internal DateTimeStyles m_style;
-        protected internal ICharTermAttribute m_termAtt;
-        protected internal IFormatProvider m_culture;
-        protected internal string[] m_formats;
+        protected DateTimeStyles m_style;
+        protected ICharTermAttribute m_termAtt;
+        protected IFormatProvider m_culture;
+        protected string[] m_formats;
 
         /// <summary>
         /// Creates a new instance of <see cref="DateRecognizerSinkFilter"/> using the current culture and <see cref="DateTimeStyles.None"/>.
         /// Loosely matches standard DateTime formats using <see cref="DateTime.TryParse(string, IFormatProvider, DateTimeStyles, out DateTime)"/>.
         /// </summary>
         public DateRecognizerSinkFilter()
-              : this((string[])null, DateTimeFormatInfo.CurrentInfo, DateTimeStyles.None)
+            : this((string[])null, DateTimeFormatInfo.CurrentInfo, DateTimeStyles.None)
         { }
 
         /// <summary>
@@ -78,12 +78,11 @@ namespace Lucene.Net.Analysis.Sinks
         /// Creates a new instance of <see cref="DateRecognizerSinkFilter"/> using the supplied culture and <see cref="DateTimeStyles"/>.
         /// Loosely matches standard DateTime formats using <see cref="DateTime.TryParse(string, IFormatProvider, DateTimeStyles, out DateTime)"/>.
         /// </summary>
-        /// If supplied, one of them must match the format of the date exactly to get a match.</param>
         /// <param name="culture">An object that supplies culture-specific format information</param>
         /// <param name="style">A bitwise combination of enumeration values that indicates the permitted format of s. 
         /// A typical value to specify is <see cref="DateTimeStyles.None"/></param>
         public DateRecognizerSinkFilter(IFormatProvider culture, DateTimeStyles style)
-            :this((string[])null, culture, style)
+            : this((string[])null, culture, style)
         { }
 
         /// <summary>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3a14dbc/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
index 1538470..e1eeabe 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TeeSinkTokenFilter.cs
@@ -1,6 +1,6 @@
-using System;
+using Lucene.Net.Util;
+using System;
 using System.Collections.Generic;
-using Lucene.Net.Util;
 
 namespace Lucene.Net.Analysis.Sinks
 {
@@ -25,43 +25,43 @@ namespace Lucene.Net.Analysis.Sinks
     /// This TokenFilter provides the ability to set aside attribute states
     /// that have already been analyzed.  This is useful in situations where multiple fields share
     /// many common analysis steps and then go their separate ways.
-    /// <p/>
+    /// <para/>
     /// It is also useful for doing things like entity extraction or proper noun analysis as
     /// part of the analysis workflow and saving off those tokens for use in another field.
-    /// 
+    /// <para/>
     /// <code>
     /// TeeSinkTokenFilter source1 = new TeeSinkTokenFilter(new WhitespaceTokenizer(version, reader1));
-    /// TeeSinkTokenFilter.SinkTokenStream sink1 = source1.newSinkTokenStream();
-    /// TeeSinkTokenFilter.SinkTokenStream sink2 = source1.newSinkTokenStream();
+    /// TeeSinkTokenFilter.SinkTokenStream sink1 = source1.NewSinkTokenStream();
+    /// TeeSinkTokenFilter.SinkTokenStream sink2 = source1.NewSinkTokenStream();
     /// 
     /// TeeSinkTokenFilter source2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(version, reader2));
-    /// source2.addSinkTokenStream(sink1);
-    /// source2.addSinkTokenStream(sink2);
+    /// source2.AddSinkTokenStream(sink1);
+    /// source2.AddSinkTokenStream(sink2);
     /// 
     /// TokenStream final1 = new LowerCaseFilter(version, source1);
     /// TokenStream final2 = source2;
     /// TokenStream final3 = new EntityDetect(sink1);
     /// TokenStream final4 = new URLDetect(sink2);
     /// 
-    /// d.add(new TextField("f1", final1, Field.Store.NO));
-    /// d.add(new TextField("f2", final2, Field.Store.NO));
-    /// d.add(new TextField("f3", final3, Field.Store.NO));
-    /// d.add(new TextField("f4", final4, Field.Store.NO));
+    /// d.Add(new TextField("f1", final1, Field.Store.NO));
+    /// d.Add(new TextField("f2", final2, Field.Store.NO));
+    /// d.Add(new TextField("f3", final3, Field.Store.NO));
+    /// d.Add(new TextField("f4", final4, Field.Store.NO));
     /// </code>
-    /// In this example, <code>sink1</code> and <code>sink2</code> will both get tokens from both
-    /// <code>reader1</code> and <code>reader2</code> after whitespace tokenizer
+    /// In this example, <c>sink1</c> and <c>sink2</c> will both get tokens from both
+    /// <c>reader1</c> and <c>reader2</c> after whitespace tokenizer
     /// and now we can further wrap any of these in extra analysis, and more "sources" can be inserted if desired.
     /// It is important, that tees are consumed before sinks (in the above example, the field names must be
     /// less the sink's field names). If you are not sure, which stream is consumed first, you can simply
-    /// add another sink and then pass all tokens to the sinks at once using <see cref="#consumeAllTokens"/>.
-    /// This TokenFilter is exhausted after this. In the above example, change
+    /// add another sink and then pass all tokens to the sinks at once using <see cref="ConsumeAllTokens"/>.
+    /// This <see cref="TokenFilter"/> is exhausted after this. In the above example, change
     /// the example above to:
     /// <code>
     /// ...
-    /// TokenStream final1 = new LowerCaseFilter(version, source1.newSinkTokenStream());
-    /// TokenStream final2 = source2.newSinkTokenStream();
-    /// sink1.consumeAllTokens();
-    /// sink2.consumeAllTokens();
+    /// TokenStream final1 = new LowerCaseFilter(version, source1.NewSinkTokenStream());
+    /// TokenStream final2 = source2.NewSinkTokenStream();
+    /// sink1.ConsumeAllTokens();
+    /// sink2.ConsumeAllTokens();
     /// ...
     /// </code>
     /// In this case, the fields can be added in any order, because the sources are not used anymore and all sinks are ready.
@@ -73,7 +73,7 @@ namespace Lucene.Net.Analysis.Sinks
         private readonly ICollection<WeakReference<SinkTokenStream>> sinks = new LinkedList<WeakReference<SinkTokenStream>>();
 
         /// <summary>
-        /// Instantiates a new TeeSinkTokenFilter.
+        /// Instantiates a new <see cref="TeeSinkTokenFilter"/>.
         /// </summary>
         public TeeSinkTokenFilter(TokenStream input)
             : base(input)
@@ -91,7 +91,7 @@ namespace Lucene.Net.Analysis.Sinks
         /// <summary>
         /// Returns a new <see cref="SinkTokenStream"/> that receives all tokens consumed by this stream
         /// that pass the supplied filter. </summary>
-        /// <seealso cref= SinkFilter></seealso>
+        /// <seealso cref="SinkFilter"/>
         public SinkTokenStream NewSinkTokenStream(SinkFilter filter)
         {
             var sink = new SinkTokenStream(CloneAttributes(), filter);
@@ -100,7 +100,7 @@ namespace Lucene.Net.Analysis.Sinks
         }
 
         /// <summary>
-        /// Adds a <see cref="SinkTokenStream"/> created by another <code>TeeSinkTokenFilter</code>
+        /// Adds a <see cref="SinkTokenStream"/> created by another <see cref="TeeSinkTokenFilter"/>
         /// to this one. The supplied stream will also receive all consumed tokens.
         /// This method can be used to pass tokens from two different tees to one sink.
         /// </summary>
@@ -120,7 +120,7 @@ namespace Lucene.Net.Analysis.Sinks
         }
 
         /// <summary>
-        /// <code>TeeSinkTokenFilter</code> passes all tokens to the added sinks
+        /// <see cref="TeeSinkTokenFilter"/> passes all tokens to the added sinks
         /// when itself is consumed. To be sure, that all tokens from the input
         /// stream are passed to the sinks, you can call this methods.
         /// This instance is exhausted after this, but all sinks are instant available.
@@ -159,7 +159,7 @@ namespace Lucene.Net.Analysis.Sinks
             return false;
         }
 
-        public override void End()
+        public override sealed void End()
         {
             base.End();
             AttributeSource.State finalState = CaptureState();
@@ -185,7 +185,7 @@ namespace Lucene.Net.Analysis.Sinks
             public abstract bool Accept(AttributeSource source);
 
             /// <summary>
-            /// Called by <see cref="SinkTokenStream#reset()"/>. This method does nothing by default
+            /// Called by <see cref="SinkTokenStream.Reset()"/>. This method does nothing by default
             /// and can optionally be overridden.
             /// </summary>
             public virtual void Reset()
@@ -195,14 +195,14 @@ namespace Lucene.Net.Analysis.Sinks
         }
 
         /// <summary>
-        /// TokenStream output from a tee with optional filtering.
+        /// <see cref="TokenStream"/> output from a tee with optional filtering.
         /// </summary>
         public sealed class SinkTokenStream : TokenStream
         {
-            internal readonly IList<AttributeSource.State> cachedStates = new List<AttributeSource.State>();
-            internal AttributeSource.State finalState;
-            internal IEnumerator<AttributeSource.State> it = null;
-            internal SinkFilter filter;
+            private readonly IList<AttributeSource.State> cachedStates = new List<AttributeSource.State>();
+            private AttributeSource.State finalState;
+            private IEnumerator<AttributeSource.State> it = null;
+            private SinkFilter filter;
 
             internal SinkTokenStream(AttributeSource source, SinkFilter filter)
                 : base(source)
@@ -229,7 +229,7 @@ namespace Lucene.Net.Analysis.Sinks
                 this.finalState = finalState;
             }
 
-            public override bool IncrementToken()
+            public override sealed bool IncrementToken()
             {
                 // lazy init the iterator
                 if (it == null)
@@ -245,7 +245,7 @@ namespace Lucene.Net.Analysis.Sinks
                 return true;
             }
 
-            public override void End()
+            public override sealed void End()
             {
                 if (finalState != null)
                 {
@@ -253,7 +253,7 @@ namespace Lucene.Net.Analysis.Sinks
                 }
             }
 
-            public override void Reset()
+            public override sealed void Reset()
             {
                 it = cachedStates.GetEnumerator();
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f3a14dbc/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TokenRangeSinkFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TokenRangeSinkFilter.cs b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TokenRangeSinkFilter.cs
index c9da3d2..4e0d773 100644
--- a/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TokenRangeSinkFilter.cs
+++ b/src/Lucene.Net.Analysis.Common/Analysis/Sinks/TokenRangeSinkFilter.cs
@@ -21,8 +21,6 @@ namespace Lucene.Net.Analysis.Sinks
 
     /// <summary>
     /// Counts the tokens as they go by and saves to the internal list those between the range of lower and upper, exclusive of upper
-    /// 
-    /// 
     /// </summary>
     public class TokenRangeSinkFilter : TeeSinkTokenFilter.SinkFilter
     {


Mime
View raw message